copy project

master
Sascha 2024-12-12 12:07:41 +07:00
parent 8127e1ead4
commit 834f0845bd
1488 changed files with 380068 additions and 1 deletion

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

@@ -0,0 +1,13 @@
# Default ignored files
/shelf/
/workspace.xml
# Rider ignored files
/.idea.project.iml
/contentModel.xml
/modules.xml
/projectSettingsUpdater.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DataSourceManagerImpl" format="xml" multifile-model="true">
<data-source source="LOCAL" name="eeao.db" uuid="d95dab00-6f4d-47f3-ab98-4b96ee53f77f">
<driver-ref>sqlite.xerial</driver-ref>
<synchronize>true</synchronize>
<jdbc-driver>org.sqlite.JDBC</jdbc-driver>
<jdbc-url>jdbc:sqlite:$PROJECT_DIR$/eeao.db</jdbc-url>
<working-dir>$ProjectFileDir$</working-dir>
</data-source>
</component>
</project>

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="UserContentModel">
<attachedFolders />
<explicitIncludes />
<explicitExcludes />
</component>
</project>

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Black">
<option name="sdkName" value="Python 3.12 virtualenv at ~/coding/cs50/project/env" />
</component>
</project>

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="SqlDialectMappings">
<file url="PROJECT" dialect="SQLite" />
</component>
</project>

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

@@ -2,7 +2,7 @@
## Video Demo
Coming soon.
## Git
https://git.woitschetzki.de/Gremlinsoft/cs50/src/branch/master/project
## Description
A new design for our orchestra homepage, with user registration that grants access to pictures and recorded music.
## TODOs

@@ -0,0 +1,119 @@
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash

from helpers import apology, login_required

# Configure application
app = Flask(__name__)

# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)

# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///eeao.db")


@app.after_request
def after_request(response):
    """Ensure responses aren't cached"""
    response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
    response.headers["Expires"] = 0
    response.headers["Pragma"] = "no-cache"
    return response


@app.route("/")
@login_required
def index():
    # Get the record of the logged-in user
    user = db.execute("SELECT * FROM users WHERE id = ?", session["user_id"])
    return render_template("index.html", user=user[0])


@app.route("/login", methods=["GET", "POST"])
def login():
    """Log user in"""

    # Forget any user_id
    session.clear()

    # User reached route via POST (as by submitting a form via POST)
    if request.method == "POST":
        # Ensure username was submitted
        if not request.form.get("username"):
            return apology("must provide username", 400)
        username = request.form.get("username")

        # Ensure password was submitted
        if not request.form.get("password"):
            return apology("must provide password", 400)
        password = request.form.get("password")

        # Query database for username
        rows = db.execute("SELECT * FROM users WHERE username = ?", username)

        # Ensure username exists and password is correct
        if len(rows) != 1 or not check_password_hash(rows[0]["hash"], password):
            return apology("invalid username and/or password", 400)

        # Remember which user has logged in
        session["user_id"] = rows[0]["id"]

        # Redirect user to home page
        return redirect("/")

    # User reached route via GET (as by clicking a link or via redirect)
    else:
        return render_template("login.html")


@app.route("/logout")
def logout():
    """Log user out"""

    # Forget any user_id
    session.clear()

    # Redirect user to login form
    return redirect("/")


@app.route("/register", methods=["GET", "POST"])
def register():
    """Register user"""

    # User reached route via POST (as by submitting a form via POST)
    if request.method == "POST":
        username = request.form.get("username")

        # Ensure username was submitted
        if not username:
            return apology("must provide username", 400)

        # Check whether the user already exists
        if db.execute("SELECT * FROM users WHERE username = ?", username):
            return apology("user already exists", 400)

        password = request.form.get("password")

        # Ensure password was submitted
        if not password:
            return apology("must provide password", 400)

        confirmation = request.form.get("confirmation")

        # Ensure confirmation was submitted
        if not confirmation:
            return apology("must provide confirmation", 400)

        # Ensure password and confirmation match
        if password != confirmation:
            return apology("password and confirmation do not match", 400)

        # Add user to db
        password_hash = generate_password_hash(password)
        db.execute("INSERT INTO users (username, hash) VALUES(?, ?)", username, password_hash)

        # Redirect user to home page
        return redirect("/")

    # User reached route via GET (as by clicking a link or via redirect)
    else:
        return render_template("register.html")

Binary file not shown.

@@ -0,0 +1,8 @@
#!/home/sascha/coding/cs50/project/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from flask.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@@ -0,0 +1,8 @@
#!/home/sascha/coding/cs50/project/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from charset_normalizer.cli import cli_detect
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(cli_detect())

@@ -0,0 +1,8 @@
#!/home/sascha/coding/cs50/project/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from sqlparse.__main__ import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@@ -0,0 +1,8 @@
#!/home/sascha/coding/cs50/project/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@@ -0,0 +1,164 @@
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
/* Greenlet object interface */
#ifndef Py_GREENLETOBJECT_H
#define Py_GREENLETOBJECT_H
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
/* This is deprecated and undocumented. It does not change. */
#define GREENLET_VERSION "1.0.0"
#ifndef GREENLET_MODULE
#define implementation_ptr_t void*
#endif
typedef struct _greenlet {
PyObject_HEAD
PyObject* weakreflist;
PyObject* dict;
implementation_ptr_t pimpl;
} PyGreenlet;
#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
/* C API functions */
/* Total number of symbols that are exported */
#define PyGreenlet_API_pointers 12
#define PyGreenlet_Type_NUM 0
#define PyExc_GreenletError_NUM 1
#define PyExc_GreenletExit_NUM 2
#define PyGreenlet_New_NUM 3
#define PyGreenlet_GetCurrent_NUM 4
#define PyGreenlet_Throw_NUM 5
#define PyGreenlet_Switch_NUM 6
#define PyGreenlet_SetParent_NUM 7
#define PyGreenlet_MAIN_NUM 8
#define PyGreenlet_STARTED_NUM 9
#define PyGreenlet_ACTIVE_NUM 10
#define PyGreenlet_GET_PARENT_NUM 11
#ifndef GREENLET_MODULE
/* This section is used by modules that use the greenlet C API */
static void** _PyGreenlet_API = NULL;
# define PyGreenlet_Type \
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
# define PyExc_GreenletError \
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
# define PyExc_GreenletExit \
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
/*
* PyGreenlet_New(PyObject *args)
*
* greenlet.greenlet(run, parent=None)
*/
# define PyGreenlet_New \
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
_PyGreenlet_API[PyGreenlet_New_NUM])
/*
* PyGreenlet_GetCurrent(void)
*
* greenlet.getcurrent()
*/
# define PyGreenlet_GetCurrent \
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
/*
* PyGreenlet_Throw(
* PyGreenlet *greenlet,
* PyObject *typ,
* PyObject *val,
* PyObject *tb)
*
* g.throw(...)
*/
# define PyGreenlet_Throw \
(*(PyObject * (*)(PyGreenlet * self, \
PyObject * typ, \
PyObject * val, \
PyObject * tb)) \
_PyGreenlet_API[PyGreenlet_Throw_NUM])
/*
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
*
* g.switch(*args, **kwargs)
*/
# define PyGreenlet_Switch \
(*(PyObject * \
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
_PyGreenlet_API[PyGreenlet_Switch_NUM])
/*
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
*
* g.parent = new_parent
*/
# define PyGreenlet_SetParent \
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
/*
* PyGreenlet_GetParent(PyObject* greenlet)
*
* return greenlet.parent;
*
* This could return NULL even if there is no exception active.
* If it does not return NULL, you are responsible for decrementing the
* reference count.
*/
# define PyGreenlet_GetParent \
(*(PyGreenlet* (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
/*
* deprecated, undocumented alias.
*/
# define PyGreenlet_GET_PARENT PyGreenlet_GetParent
# define PyGreenlet_MAIN \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_MAIN_NUM])
# define PyGreenlet_STARTED \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_STARTED_NUM])
# define PyGreenlet_ACTIVE \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
/* Macro that imports greenlet and initializes C API */
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
keep the older definition to be sure older code that might have a copy of
the header still works. */
# define PyGreenlet_Import() \
{ \
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
}
#endif /* GREENLET_MODULE */
#ifdef __cplusplus
}
#endif
#endif /* !Py_GREENLETOBJECT_H */
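For orientation, the comments in this header describe each C macro in terms of its Python-level equivalent (`greenlet.greenlet(run, parent=None)`, `greenlet.getcurrent()`, `g.switch(...)`, `g.throw(...)`). A minimal sketch of that Python-level API, assuming the greenlet package from the bundled virtualenv is importable:

```python
# Sketch of the Python-level greenlet API mirrored by the C macros above.
from greenlet import greenlet, getcurrent

def task_a():
    print("in A, switching to B")
    gb.switch()                  # corresponds to PyGreenlet_Switch
    print("back in A")

def task_b():
    print("in B, switching back to A")
    ga.switch()

ga = greenlet(task_a)            # corresponds to PyGreenlet_New(run, parent)
gb = greenlet(task_b)

ga.switch()                      # run A -> B -> A, then return to the main greenlet
assert getcurrent() is not ga    # corresponds to PyGreenlet_GetCurrent
```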

@@ -0,0 +1,28 @@
Copyright 2010 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -0,0 +1,92 @@
Metadata-Version: 2.1
Name: MarkupSafe
Version: 3.0.2
Summary: Safely add untrusted strings to HTML/XML markup.
Maintainer-email: Pallets <contact@palletsprojects.com>
License: Copyright 2010 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Project-URL: Donate, https://palletsprojects.com/donate
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
Project-URL: Source, https://github.com/pallets/markupsafe/
Project-URL: Chat, https://discord.gg/pallets
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Text Processing :: Markup :: HTML
Classifier: Typing :: Typed
Requires-Python: >=3.9
Description-Content-Type: text/markdown
License-File: LICENSE.txt
# MarkupSafe
MarkupSafe implements a text object that escapes characters so it is
safe to use in HTML and XML. Characters that have special meanings are
replaced so that they display as the actual characters. This mitigates
injection attacks, meaning untrusted user input can safely be displayed
on a page.
## Examples
```pycon
>>> from markupsafe import Markup, escape
>>> # escape replaces special characters and wraps in Markup
>>> escape("<script>alert(document.cookie);</script>")
Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
>>> # wrap in Markup to mark text "safe" and prevent escaping
>>> Markup("<strong>Hello</strong>")
Markup('<strong>hello</strong>')
>>> escape(Markup("<strong>Hello</strong>"))
Markup('<strong>hello</strong>')
>>> # Markup is a str subclass
>>> # methods and operators escape their arguments
>>> template = Markup("Hello <em>{name}</em>")
>>> template.format(name='"World"')
Markup('Hello <em>&#34;World&#34;</em>')
```
## Donate
The Pallets organization develops and supports MarkupSafe and other
popular packages. In order to grow the community of contributors and
users, and allow the maintainers to devote more time to the projects,
[please donate today][].
[please donate today]: https://palletsprojects.com/donate

@@ -0,0 +1,14 @@
MarkupSafe-3.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
MarkupSafe-3.0.2.dist-info/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
MarkupSafe-3.0.2.dist-info/METADATA,sha256=aAwbZhSmXdfFuMM-rEHpeiHRkBOGESyVLJIuwzHP-nw,3975
MarkupSafe-3.0.2.dist-info/RECORD,,
MarkupSafe-3.0.2.dist-info/WHEEL,sha256=OVgtqZzfzIXXtylXP90gxCZ6CKBCwKYyHM8PpMEjN1M,151
MarkupSafe-3.0.2.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
markupsafe/__init__.py,sha256=sr-U6_27DfaSrj5jnHYxWN-pvhM27sjlDplMDPZKm7k,13214
markupsafe/__pycache__/__init__.cpython-312.pyc,,
markupsafe/__pycache__/_native.cpython-312.pyc,,
markupsafe/_native.py,sha256=hSLs8Jmz5aqayuengJJ3kdT5PwNpBWpKrmQSdipndC8,210
markupsafe/_speedups.c,sha256=O7XulmTo-epI6n2FtMVOrJXl8EAaIwD2iNYmBI5SEoQ,4149
markupsafe/_speedups.cpython-312-x86_64-linux-gnu.so,sha256=t1DBZlpsjFA30BOOvXfXfT1wvO_4cS16VbHz1-49q5U,43432
markupsafe/_speedups.pyi,sha256=ENd1bYe7gbBUf2ywyYWOGUpnXOHNJ-cgTNqetlW8h5k,41
markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: setuptools (75.2.0)
Root-Is-Purelib: false
Tag: cp312-cp312-manylinux_2_17_x86_64
Tag: cp312-cp312-manylinux2014_x86_64

@@ -0,0 +1,19 @@
Copyright 2005-2024 SQLAlchemy authors and contributors <see AUTHORS file>.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -0,0 +1,243 @@
Metadata-Version: 2.1
Name: SQLAlchemy
Version: 2.0.36
Summary: Database Abstraction Library
Home-page: https://www.sqlalchemy.org
Author: Mike Bayer
Author-email: mike_mp@zzzcomputing.com
License: MIT
Project-URL: Documentation, https://docs.sqlalchemy.org
Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database :: Front-Ends
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: typing-extensions >=4.6.0
Requires-Dist: greenlet !=0.4.17 ; python_version < "3.13" and (platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32"))))))
Requires-Dist: importlib-metadata ; python_version < "3.8"
Provides-Extra: aiomysql
Requires-Dist: greenlet !=0.4.17 ; extra == 'aiomysql'
Requires-Dist: aiomysql >=0.2.0 ; extra == 'aiomysql'
Provides-Extra: aioodbc
Requires-Dist: greenlet !=0.4.17 ; extra == 'aioodbc'
Requires-Dist: aioodbc ; extra == 'aioodbc'
Provides-Extra: aiosqlite
Requires-Dist: greenlet !=0.4.17 ; extra == 'aiosqlite'
Requires-Dist: aiosqlite ; extra == 'aiosqlite'
Requires-Dist: typing-extensions !=3.10.0.1 ; extra == 'aiosqlite'
Provides-Extra: asyncio
Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncio'
Provides-Extra: asyncmy
Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncmy'
Requires-Dist: asyncmy !=0.2.4,!=0.2.6,>=0.2.3 ; extra == 'asyncmy'
Provides-Extra: mariadb_connector
Requires-Dist: mariadb !=1.1.10,!=1.1.2,!=1.1.5,>=1.0.1 ; extra == 'mariadb_connector'
Provides-Extra: mssql
Requires-Dist: pyodbc ; extra == 'mssql'
Provides-Extra: mssql_pymssql
Requires-Dist: pymssql ; extra == 'mssql_pymssql'
Provides-Extra: mssql_pyodbc
Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
Provides-Extra: mypy
Requires-Dist: mypy >=0.910 ; extra == 'mypy'
Provides-Extra: mysql
Requires-Dist: mysqlclient >=1.4.0 ; extra == 'mysql'
Provides-Extra: mysql_connector
Requires-Dist: mysql-connector-python ; extra == 'mysql_connector'
Provides-Extra: oracle
Requires-Dist: cx-oracle >=8 ; extra == 'oracle'
Provides-Extra: oracle_oracledb
Requires-Dist: oracledb >=1.0.1 ; extra == 'oracle_oracledb'
Provides-Extra: postgresql
Requires-Dist: psycopg2 >=2.7 ; extra == 'postgresql'
Provides-Extra: postgresql_asyncpg
Requires-Dist: greenlet !=0.4.17 ; extra == 'postgresql_asyncpg'
Requires-Dist: asyncpg ; extra == 'postgresql_asyncpg'
Provides-Extra: postgresql_pg8000
Requires-Dist: pg8000 >=1.29.1 ; extra == 'postgresql_pg8000'
Provides-Extra: postgresql_psycopg
Requires-Dist: psycopg >=3.0.7 ; extra == 'postgresql_psycopg'
Provides-Extra: postgresql_psycopg2binary
Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
Provides-Extra: postgresql_psycopg2cffi
Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
Provides-Extra: postgresql_psycopgbinary
Requires-Dist: psycopg[binary] >=3.0.7 ; extra == 'postgresql_psycopgbinary'
Provides-Extra: pymysql
Requires-Dist: pymysql ; extra == 'pymysql'
Provides-Extra: sqlcipher
Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher'
SQLAlchemy
==========
|PyPI| |Python| |Downloads|
.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI
.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI - Python Version
.. |Downloads| image:: https://static.pepy.tech/badge/sqlalchemy/month
:target: https://pepy.tech/project/sqlalchemy
:alt: PyPI - Downloads
The Python SQL Toolkit and Object Relational Mapper
Introduction
-------------
SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
that gives application developers the full power and
flexibility of SQL. SQLAlchemy provides a full suite
of well known enterprise-level persistence patterns,
designed for efficient and high-performing database
access, adapted into a simple and Pythonic domain
language.
Major SQLAlchemy features include:
* An industrial strength ORM, built
from the core on the identity map, unit of work,
and data mapper patterns. These patterns
allow transparent persistence of objects
using a declarative configuration system.
Domain models
can be constructed and manipulated naturally,
and changes are synchronized with the
current transaction automatically.
* A relationally-oriented query system, exposing
the full range of SQL's capabilities
explicitly, including joins, subqueries,
correlation, and most everything else,
in terms of the object model.
Writing queries with the ORM uses the same
techniques of relational composition you use
when writing SQL. While you can drop into
literal SQL at any time, it's virtually never
needed.
* A comprehensive and flexible system
of eager loading for related collections and objects.
Collections are cached within a session,
and can be loaded on individual access, all
at once using joins, or by query per collection
across the full result set.
* A Core SQL construction system and DBAPI
interaction layer. The SQLAlchemy Core is
separate from the ORM and is a full database
abstraction layer in its own right, and includes
an extensible Python-based SQL expression
language, schema metadata, connection pooling,
type coercion, and custom types.
* All primary and foreign key constraints are
assumed to be composite and natural. Surrogate
integer primary keys are of course still the
norm, but SQLAlchemy never assumes or hardcodes
to this model.
* Database introspection and generation. Database
schemas can be "reflected" in one step into
Python structures representing database metadata;
those same structures can then generate
CREATE statements right back out - all within
the Core, independent of the ORM.
SQLAlchemy's philosophy:
* SQL databases behave less and less like object
collections the more size and performance start to
matter; object collections behave less and less like
tables and rows the more abstraction starts to matter.
SQLAlchemy aims to accommodate both of these
principles.
* An ORM doesn't need to hide the "R". A relational
database provides rich, set-based functionality
that should be fully exposed. SQLAlchemy's
ORM provides an open-ended set of patterns
that allow a developer to construct a custom
mediation layer between a domain model and
a relational schema, turning the so-called
"object relational impedance" issue into
a distant memory.
* The developer, in all cases, makes all decisions
regarding the design, structure, and naming conventions
of both the object model as well as the relational
schema. SQLAlchemy only provides the means
to automate the execution of these decisions.
* With SQLAlchemy, there's no such thing as
"the ORM generated a bad query" - you
retain full control over the structure of
queries, including how joins are organized,
how subqueries and correlation is used, what
columns are requested. Everything SQLAlchemy
does is ultimately the result of a developer-initiated
decision.
* Don't use an ORM if the problem doesn't need one.
SQLAlchemy consists of a Core and separate ORM
component. The Core offers a full SQL expression
language that allows Pythonic construction
of SQL constructs that render directly to SQL
strings for a target database, returning
result sets that are essentially enhanced DBAPI
cursors.
* Transactions should be the norm. With SQLAlchemy's
ORM, nothing goes to permanent storage until
commit() is called. SQLAlchemy encourages applications
to create a consistent means of delineating
the start and end of a series of operations.
* Never render a literal value in a SQL statement.
Bound parameters are used to the greatest degree
possible, allowing query optimizers to cache
query plans effectively and making SQL injection
attacks a non-issue.
Documentation
-------------
Latest documentation is at:
https://www.sqlalchemy.org/docs/
Installation / Requirements
---------------------------
Full documentation for installation is at
`Installation <https://www.sqlalchemy.org/docs/intro.html#installation>`_.
Getting Help / Development / Bug reporting
------------------------------------------
Please refer to the `SQLAlchemy Community Guide <https://www.sqlalchemy.org/support.html>`_.
Code of Conduct
---------------
Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
constructive communication between users and developers.
Please see our current Code of Conduct at
`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_.
License
-------
SQLAlchemy is distributed under the `MIT license
<https://www.opensource.org/licenses/mit-license.php>`_.

@@ -0,0 +1,529 @@
SQLAlchemy-2.0.36.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
SQLAlchemy-2.0.36.dist-info/LICENSE,sha256=PA9Zq4h9BB3mpOUv_j6e212VIt6Qn66abNettue-MpM,1100
SQLAlchemy-2.0.36.dist-info/METADATA,sha256=EZH514FydYtyOhgoZk_OF1ZQEtI4eTAEddlnUlRjzac,9692
SQLAlchemy-2.0.36.dist-info/RECORD,,
SQLAlchemy-2.0.36.dist-info/WHEEL,sha256=7B4nnId14TToQHuAKpxbDLCJbNciqBsV-mvXE2hVLJc,151
SQLAlchemy-2.0.36.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
sqlalchemy/__init__.py,sha256=J2PsdiJiNW93Etxk6YN8o_C3TcpR1_DckU71r4LBcGE,13033
sqlalchemy/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/__pycache__/events.cpython-312.pyc,,
sqlalchemy/__pycache__/exc.cpython-312.pyc,,
sqlalchemy/__pycache__/inspection.cpython-312.pyc,,
sqlalchemy/__pycache__/log.cpython-312.pyc,,
sqlalchemy/__pycache__/schema.cpython-312.pyc,,
sqlalchemy/__pycache__/types.cpython-312.pyc,,
sqlalchemy/connectors/__init__.py,sha256=PzXPqZqi3BzEnrs1eW0DcsR4lyknAzhhN9rWcQ97hb4,476
sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc,,
sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc,,
sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc,,
sqlalchemy/connectors/aioodbc.py,sha256=GSTiNMO9h0qjPxgqaxDwWZ8HvhWMFNVR6MJQnN1oc40,5288
sqlalchemy/connectors/asyncio.py,sha256=Hq2bkXmG6-KO_RfCrwMqx4oGH-uH1Z1WWKqPWNjz8p4,6138
sqlalchemy/connectors/pyodbc.py,sha256=t7AjyxIOnaWg3CrlUEpBs4Y5l0HFdNt3P_cSSKhbi0Y,8501
sqlalchemy/cyextension/__init__.py,sha256=GzhhN8cjMnDTE0qerlUlpbrNmFPHQWCZ4Gk74OAxl04,244
sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/cyextension/collections.cpython-312-x86_64-linux-gnu.so,sha256=ofziMIrcxCV-AGNBEvHL7QorRR2SPA9bkQj_k3WLD9E,1932256
sqlalchemy/cyextension/collections.pyx,sha256=L7DZ3DGKpgw2MT2ZZRRxCnrcyE5pU1NAFowWgAzQPEc,12571
sqlalchemy/cyextension/immutabledict.cpython-312-x86_64-linux-gnu.so,sha256=7SNuSRYPX4hzKqjAtbxJT2xLwa6Aegqtjfc9MYYZV0w,805632
sqlalchemy/cyextension/immutabledict.pxd,sha256=3x3-rXG5eRQ7bBnktZ-OJ9-6ft8zToPmTDOd92iXpB0,291
sqlalchemy/cyextension/immutabledict.pyx,sha256=KfDTYbTfebstE8xuqAtuXsHNAK0_b5q_ymUiinUe_xs,3535
sqlalchemy/cyextension/processors.cpython-312-x86_64-linux-gnu.so,sha256=hBeVC8lWoCgLmD5det5fcoL4V8LYT9Cu8TRLaAzWeW0,530680
sqlalchemy/cyextension/processors.pyx,sha256=R1rHsGLEaGeBq5VeCydjClzYlivERIJ9B-XLOJlf2MQ,1792
sqlalchemy/cyextension/resultproxy.cpython-312-x86_64-linux-gnu.so,sha256=n6E3F0rPZFVUnpAm1pr3T-Rc8ZMqUcLjdRoOetVvJ8M,621328
sqlalchemy/cyextension/resultproxy.pyx,sha256=eWLdyBXiBy_CLQrF5ScfWJm7X0NeelscSXedtj1zv9Q,2725
sqlalchemy/cyextension/util.cpython-312-x86_64-linux-gnu.so,sha256=IbH9FP4ihbuGMhZ95oiRo_6F2wkSAlE2YDzegCVqErg,950928
sqlalchemy/cyextension/util.pyx,sha256=B85orxa9LddLuQEaDoVSq1XmAXIbLKxrxpvuB8ogV_o,2530
sqlalchemy/dialects/__init__.py,sha256=Kos9Gf5JZg1Vg6GWaCqEbD6e0r1jCwCmcnJIfcxDdcY,1770
sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc,,
sqlalchemy/dialects/_typing.py,sha256=hyv0nKucX2gI8ispB1IsvaUgrEPn9zEcq9hS7kfstEw,888
sqlalchemy/dialects/mssql/__init__.py,sha256=r5t8wFRNtBQoiUWh0WfIEWzXZW6f3D0uDt6NZTW_7Cc,1880
sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc,,
sqlalchemy/dialects/mssql/aioodbc.py,sha256=UQd9ecSMIML713TDnLAviuBVJle7P7i1FtqGZZePk2Y,2022
sqlalchemy/dialects/mssql/base.py,sha256=msl_N_a_z8ali7Nthx55AGoV7b5wakCWvWu560BvH9o,132423
sqlalchemy/dialects/mssql/information_schema.py,sha256=HswjDc6y0mPXCf_x6VyylHlBdBa4PSY6Evxmmlch700,8084
sqlalchemy/dialects/mssql/json.py,sha256=evUACW2O62TAPq8B7QIPagz7jfc664ql9ms68JqiYzg,4816
sqlalchemy/dialects/mssql/provision.py,sha256=ZAtt6Div9NLIngMs8kyloxfphw0KDNMsnRCAVd7-esE,5593
sqlalchemy/dialects/mssql/pymssql.py,sha256=LAv43q4vBCB85OsAwHQItaQUYTYIO0QJ-jvzaBrswmY,4097
sqlalchemy/dialects/mssql/pyodbc.py,sha256=vwM-vBlmRwrqxOc73P0sFOrBSwn24wzc5IkEOpalbXQ,27056
sqlalchemy/dialects/mysql/__init__.py,sha256=bxbi4hkysUK2OOVvr1F49akUj1cky27kKb07tgFzI9U,2153
sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc,,
sqlalchemy/dialects/mysql/aiomysql.py,sha256=-oMZnCqNsSki8mlQRTWIwiQPT1OVdZIuANkb90q8LAs,9999
sqlalchemy/dialects/mysql/asyncmy.py,sha256=YpuuOh8VknEeqHqUXQGfQ3jhfO3Xb-vZv78Jq5cscJ0,10067
sqlalchemy/dialects/mysql/base.py,sha256=giGlZNGrKsNMoSkbzY0PGgfamKjA9rOkSq1o5vKvno4,122755
sqlalchemy/dialects/mysql/cymysql.py,sha256=eXT1ry0w_qRxjiO24M980c-8PZ9qSsbhqBHntjEiKB0,2300
sqlalchemy/dialects/mysql/dml.py,sha256=HXJMAvimJsqvhj3UZO4vW_6LkF5RqaKbHvklAjor7yU,7645
sqlalchemy/dialects/mysql/enumerated.py,sha256=ipEPPQqoXfFwcywNdcLlZCEzHBtnitHRah1Gn6nItcg,8448
sqlalchemy/dialects/mysql/expression.py,sha256=lsmQCHKwfPezUnt27d2kR6ohk4IRFCA64KBS16kx5dc,4097
sqlalchemy/dialects/mysql/json.py,sha256=l6MEZ0qp8FgiRrIQvOMhyEJq0q6OqiEnvDTx5Cbt9uQ,2269
sqlalchemy/dialects/mysql/mariadb.py,sha256=kTfBLioLKk4JFFst4TY_iWqPtnvvQXFHknLfm89H2N8,853
sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=_S1aV93kyP52Nvj7HR9weThML4oUvSLsLqiVFdoLR2o,8623
sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=oq3mtsNOMldUjs32JbJG2u3Hy3DObyVzUUMYfOkwkHg,5729
sqlalchemy/dialects/mysql/mysqldb.py,sha256=qUBbA6STeYGozutyTxHCo5p1W3p59QFFS2FwCgPrjBA,9503
sqlalchemy/dialects/mysql/provision.py,sha256=Jnk8UO9_Apd2odR2IQFLrscCfAmYxuBKcB8giS3bBog,3575
sqlalchemy/dialects/mysql/pymysql.py,sha256=GUnSHd2M2uKjmN46Hheymtm26g7phEgwYOXrX0zLY8M,4083
sqlalchemy/dialects/mysql/pyodbc.py,sha256=072crI4qVyPhajYvHnsfFeSrNjLFVPIjBQKo5uyz5yk,4297
sqlalchemy/dialects/mysql/reflection.py,sha256=3u34YwT1JJh3uThGZJZ3FKdnUcT7v08QB-tAl1r7VRk,22834
sqlalchemy/dialects/mysql/reserved_words.py,sha256=ucKX2p2c3UnMq2ayZuOHuf73eXhu7SKsOsTlIN1Q83I,9258
sqlalchemy/dialects/mysql/types.py,sha256=L5cTCsMT1pTedszNEM3jSxFNZEMcHQLprYCZ0vmfsnA,24343
sqlalchemy/dialects/oracle/__init__.py,sha256=p4-2gw7TT0bX_MoJXTGD4i8WHctYsK9kCRbkpzykBrc,1493
sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc,,
sqlalchemy/dialects/oracle/base.py,sha256=zLMZedrr6j1LvJz4qYnoSjikI5RZY92YFeQHiZ_YvW0,119676
sqlalchemy/dialects/oracle/cx_oracle.py,sha256=q8Nyj15UZCE2TWOmxuWp5ZsxiCiGMzqfd_9UkmjIja0,55235
sqlalchemy/dialects/oracle/dictionary.py,sha256=7WMrbPkqo8ZdGjaEZyQr-5f2pajSOF1OTGb8P97z8-g,19519
sqlalchemy/dialects/oracle/oracledb.py,sha256=fZRKGqNIwW9LG4i8yDOXABrucbfzn_yC86Od-BJ3PcM,13619
sqlalchemy/dialects/oracle/provision.py,sha256=O9ZpF4OG6Cx4mMzLRfZwhs8dZjrJETWR402n9c7726A,8304
sqlalchemy/dialects/oracle/types.py,sha256=QK3hJvWzKnnCe3oD3rItwEEIwcoBze8qGg7VFOvVlIk,8231
sqlalchemy/dialects/postgresql/__init__.py,sha256=wwnNAq4wDQzrlPRzDNB06ayuq3L2HNO99nzeEvq-YcU,3892
sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/_psycopg_common.py,sha256=7TudtgsPiSB8O5kX8W8KxcNYR8t5h_UHb86b_ChL0P8,5696
sqlalchemy/dialects/postgresql/array.py,sha256=bWcame7ntmI_Kx6gmBX0-chwADFdLHeCvaDQ4iX8id8,13734
sqlalchemy/dialects/postgresql/asyncpg.py,sha256=9P0Itn9eeSBu67kGSsHuzx8xd4YYwRKdiZ5m7bF5onU,41074
sqlalchemy/dialects/postgresql/base.py,sha256=dGPsaV3Esw6-AwE3QcgHF0Fray3Yw5-gLLgCvgdxvS0,179083
sqlalchemy/dialects/postgresql/dml.py,sha256=Pc69Le6qzmUHHb1FT5zeUSD31dWm6SBgdCAGW89cs3s,11212
sqlalchemy/dialects/postgresql/ext.py,sha256=1bZ--iNh2O9ym7l2gXZX48yP3yMO4dqb9RpYro2Mj2Q,16262
sqlalchemy/dialects/postgresql/hstore.py,sha256=otAx-RTDfpi_tcXkMuQV0JOIXtYgevgnsikLKKOkI6U,11541
sqlalchemy/dialects/postgresql/json.py,sha256=53rQWon9cUXd1yCjIvUpJjWwNyRSy3U7Kz0HV70ftrc,11618
sqlalchemy/dialects/postgresql/named_types.py,sha256=3IV1ufo7zJjKmX4VtGDEnoXE6xEqLJAtGG82IiqHXwY,17594
sqlalchemy/dialects/postgresql/operators.py,sha256=NsAaWun_tL3d_be0fs9YL6T4LPKK6crnmFxxIJHgyeY,2808
sqlalchemy/dialects/postgresql/pg8000.py,sha256=3yoekiWSF-xnaWMqG76XrYPMqerg-42TdmfsW_ivK9E,18640
sqlalchemy/dialects/postgresql/pg_catalog.py,sha256=hY3NXEUHxTWD4umhd2aowNu3laC-61Q_qQ_pReyXTUM,9254
sqlalchemy/dialects/postgresql/provision.py,sha256=t6TZj0XaWG9zrpCjNr0oJRjAC_WQzaNdp3kaKJIbS8I,5770
sqlalchemy/dialects/postgresql/psycopg.py,sha256=Uwf45f9fInOtaExiEdwiP9xzRo7hw0XyZTkRtgdom44,23168
sqlalchemy/dialects/postgresql/psycopg2.py,sha256=kwEnflz5bAqJcuO_20eYiCtha_a4m_tg5_lppdDnaeU,31998
sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=M7wAYSL6Pvt-4nbfacAHGyyw4XMKJ_bQZ1tc1pBtIdg,1756
sqlalchemy/dialects/postgresql/ranges.py,sha256=6CgV7qkxEMJ9AQsiibo_XBLJYzGh-2ZxpG83sRaesVY,32949
sqlalchemy/dialects/postgresql/types.py,sha256=Jfxqw9JaKNOq29JRWBublywgb3lLMyzx8YZI7CXpS2s,7300
sqlalchemy/dialects/sqlite/__init__.py,sha256=lp9DIggNn349M-7IYhUA8et8--e8FRExWD2V_r1LJk4,1182
sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=g3qGV6jmiXabWyb3282g_Nmxtj1jThxGSe9C9yalb-U,12345
sqlalchemy/dialects/sqlite/base.py,sha256=LcnW6hzxqTtPlDBOInHumvuDt8a31THA5Jnm4vFvdFI,97811
sqlalchemy/dialects/sqlite/dml.py,sha256=9GE55WvwoktKy2fHeT-Wbc9xPHgsbh5oBfd_fckMH5Q,8443
sqlalchemy/dialects/sqlite/json.py,sha256=Eoplbb_4dYlfrtmQaI8Xddd2suAIHA-IdbDQYM-LIhs,2777
sqlalchemy/dialects/sqlite/provision.py,sha256=UCpmwxf4IWlrpb2eLHGbPTpCFVbdI_KAh2mKtjiLYao,5632
sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=OL2S_05DK9kllZj6DOz7QtEl7jI7syxjW6woS725ii4,5356
sqlalchemy/dialects/sqlite/pysqlite.py,sha256=aDp47n0J509kl2hDchoaBKXEQVZtkux54DwfKytUAe4,28068
sqlalchemy/dialects/type_migration_guidelines.txt,sha256=-uHNdmYFGB7bzUNT6i8M5nb4j6j9YUKAtW4lcBZqsMg,8239
sqlalchemy/engine/__init__.py,sha256=Stb2oV6l8w65JvqEo6J4qtKoApcmOpXy3AAxQud4C1o,2818
sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/base.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/create.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/default.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/events.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/mock.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/processors.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/result.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/row.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/url.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/util.cpython-312.pyc,,
sqlalchemy/engine/_py_processors.py,sha256=j9i_lcYYQOYJMcsDerPxI0sVFBIlX5sqoYMdMJlgWPI,3744
sqlalchemy/engine/_py_row.py,sha256=wSqoUFzLOJ1f89kgDb6sJm9LUrF5LMFpXPcK1vUsKcs,3787
sqlalchemy/engine/_py_util.py,sha256=f2DI3AN1kv6EplelowesCVpwS8hSXNufRkZoQmJtSH8,2484
sqlalchemy/engine/base.py,sha256=frWSMmt3dlentYH4QNN3cijdGzp8NbunColUZwWsWgI,122958
sqlalchemy/engine/characteristics.py,sha256=N3kbvw_ApMh86wb5yAGnxtPYD4YRhYMWion1H_aVZBI,4765
sqlalchemy/engine/create.py,sha256=mYJtOG2ZKM8sgyfjpGpamW15RDU7JXi5s6iibbJHMIs,33206
sqlalchemy/engine/cursor.py,sha256=cFq61yrw76k-QR_xNUBWuL-Zeyb14ltG-6jo2Q2iuuw,76392
sqlalchemy/engine/default.py,sha256=2wwKKdsagb3QTajRSEw8Hl-EnQ-LmRxy822xOGyenHc,84648
sqlalchemy/engine/events.py,sha256=c0unNFFiHzTAvkUtXoJaxzMFMDwurBkHiiUhuN8qluc,37381
sqlalchemy/engine/interfaces.py,sha256=fcVHOmnMo7JZLHzgSKoK3QsdVHH7kJ_AmrDvwW9Ka3k,112936
sqlalchemy/engine/mock.py,sha256=yvpxgFmRw5G4QsHeF-ZwQGHKES-HqQOucTxFtN1uzdk,4179
sqlalchemy/engine/processors.py,sha256=XyfINKbo-2fjN-mW55YybvFyQMOil50_kVqsunahkNs,2379
sqlalchemy/engine/reflection.py,sha256=gwGs8y7x6py5z-ZWx3hQqQrwpHepMCTJyQcFwWJjPlw,75364
sqlalchemy/engine/result.py,sha256=NZEskTMAcDzK-vjE96Fw8VvBL58s5Y6rt9vXcmZdM4w,77651
sqlalchemy/engine/row.py,sha256=9AAQo9zYDL88GcZ3bjcQTwMT-YIcuGTSMAyTfmBJ_yM,12032
sqlalchemy/engine/strategies.py,sha256=DqFSWaXJPL-29Omot9O0aOcuGL8KmCGyOvnPGDkAJoE,442
sqlalchemy/engine/url.py,sha256=8eWkUaIUyDExOcJ2D4xJXRcn4OY1GQJ3Q2duSX6UGAg,30784
sqlalchemy/engine/util.py,sha256=bNirO8k1S8yOW61uNH-a9QrWtAJ9VGFgbiR0lk1lUQU,5682
sqlalchemy/event/__init__.py,sha256=KBrp622xojnC3FFquxa2JsMamwAbfkvzfv6Op0NKiYc,997
sqlalchemy/event/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/event/__pycache__/api.cpython-312.pyc,,
sqlalchemy/event/__pycache__/attr.cpython-312.pyc,,
sqlalchemy/event/__pycache__/base.cpython-312.pyc,,
sqlalchemy/event/__pycache__/legacy.cpython-312.pyc,,
sqlalchemy/event/__pycache__/registry.cpython-312.pyc,,
sqlalchemy/event/api.py,sha256=DtDVgjKSorOfp9MGJ7fgMWrj4seC_hkwF4D8CW1RFZU,8226
sqlalchemy/event/attr.py,sha256=X8QeHGK4ioSYht1vkhc11f606_mq_t91jMNIT314ubs,20751
sqlalchemy/event/base.py,sha256=270OShTD17-bSFUFnPtKdVnB0NFJZ2AouYPo1wT0aJw,15127
sqlalchemy/event/legacy.py,sha256=teMPs00fO-4g8a_z2omcVKkYce5wj_1uvJO2n2MIeuo,8227
sqlalchemy/event/registry.py,sha256=nfTSSyhjZZXc5wseWB4sXn-YibSc0LKX8mg17XlWmAo,10835
sqlalchemy/events.py,sha256=k-ZD38aSPD29LYhED7CBqttp5MDVVx_YSaWC2-cu9ec,525
sqlalchemy/exc.py,sha256=M_8-O1hd8i6gbyx-TapV400p_Lxq2QqTGMXUAO-YgCc,23976
sqlalchemy/ext/__init__.py,sha256=S1fGKAbycnQDV01gs-JWGaFQ9GCD4QHwKcU2wnugg_o,322
sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/automap.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/baked.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc,,
sqlalchemy/ext/associationproxy.py,sha256=ZGc_ssGf7FC6eKrja1iTvnWEKLkFZQA8CiVAjR8iVRw,66062
sqlalchemy/ext/asyncio/__init__.py,sha256=1OqSxEyIUn7RWLGyO12F-jAUIvk1I6DXlVy80-Gvkds,1317
sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc,,
sqlalchemy/ext/asyncio/base.py,sha256=fl7wxZD9KjgFiCtG3WXrYjHEvanamcsodCqq9pH9lOk,8905
sqlalchemy/ext/asyncio/engine.py,sha256=S_IRWX4QAjj2veLSu4Y3gKBIXkKQt7_2StJAK2_KUDY,48190
sqlalchemy/ext/asyncio/exc.py,sha256=8sII7VMXzs2TrhizhFQMzSfcroRtiesq8o3UwLfXSgQ,639
sqlalchemy/ext/asyncio/result.py,sha256=3rbVIY_wySi50JwaK3Kf2qa3c5Fc8W84FtUpt-9i9Vk,30477
sqlalchemy/ext/asyncio/scoping.py,sha256=UxHAFxtWKqA7TEozyN2h7MJyzSspTCrS-1SlgQLTExo,52608
sqlalchemy/ext/asyncio/session.py,sha256=QpXnqspwYnT28znD1EdpUIaVjQOO1BirtS0BJeBxeZk,63087
sqlalchemy/ext/automap.py,sha256=r0mUSyogNyqdBL4m9AA1NXbLiTLQmtvyQymsssNEipo,61581
sqlalchemy/ext/baked.py,sha256=H6T1il7GY84BhzPFj49UECSpZh_eBuiHomA-QIsYOYQ,17807
sqlalchemy/ext/compiler.py,sha256=6X6sZCAo9v-PQfLbwBSYQUK0-XH2xTE5Jm0Zg6Ka6eM,20877
sqlalchemy/ext/declarative/__init__.py,sha256=20psLdFQbbOWfpdXHZ0CTY6I1k4UqXvKemNVu1LvPOI,1818
sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc,,
sqlalchemy/ext/declarative/extensions.py,sha256=uCjN1GisQt54AjqYnKYzJdUjnGd2pZBW47WWdPlS7FE,19547
sqlalchemy/ext/horizontal_shard.py,sha256=wuwAPnHymln0unSBnyx-cpX0AfESKSsypaSQTYCvzDk,16750
sqlalchemy/ext/hybrid.py,sha256=IYkCaPZ29gm2cPKPg0cWMkLCEqMykD8-JJTvgacGbmc,52458
sqlalchemy/ext/indexable.py,sha256=UkTelbydKCdKelzbv3HWFFavoET9WocKaGRPGEOVfN8,11032
sqlalchemy/ext/instrumentation.py,sha256=sg8ghDjdHSODFXh_jAmpgemnNX1rxCeeXEG3-PMdrNk,15707
sqlalchemy/ext/mutable.py,sha256=L5ZkHBGYhMaqO75Xtyrk2DBR44RDk0g6Rz2HzHH0F8Q,37355
sqlalchemy/ext/mypy/__init__.py,sha256=0WebDIZmqBD0OTq5JLtd_PmfF9JGxe4d4Qv3Ml3PKUg,241
sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc,,
sqlalchemy/ext/mypy/apply.py,sha256=Aek_-XA1eXihT4attxhfE43yBKtCgsxBSb--qgZKUqc,10550
sqlalchemy/ext/mypy/decl_class.py,sha256=1vVJRII2apnLTUbc5HkJS6Z2GueaUv_eKvhbqh7Wik4,17384
sqlalchemy/ext/mypy/infer.py,sha256=KVnmLFEVS33Al8pUKI7MJbJQu3KeveBUMl78EluBORw,19369
sqlalchemy/ext/mypy/names.py,sha256=Q3ef8XQBgVm9WUwlItqlYCXDNi_kbV5DdLEgbtEMEI8,10479
sqlalchemy/ext/mypy/plugin.py,sha256=74ML8LI9xar0V86oCxnPFv5FQGEEfUzK64vOay4BKFs,9750
sqlalchemy/ext/mypy/util.py,sha256=DKRaurkXHI2lAMAAcEO5GLXbX_m2Xqy7l_juh8Byf5U,9960
sqlalchemy/ext/orderinglist.py,sha256=TGYbsGH72wEZcFNQDYDsZg9OSPuzf__P8YX8_2HtYUo,14384
sqlalchemy/ext/serializer.py,sha256=D0g4jMZkRk0Gjr0L-FZe81SR63h0Zs-9JzuWtT_SD7k,6140
sqlalchemy/future/__init__.py,sha256=q2mw-gxk_xoxJLEvRoyMha3vO1xSRHrslcExOHZwmPA,512
sqlalchemy/future/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/future/__pycache__/engine.cpython-312.pyc,,
sqlalchemy/future/engine.py,sha256=AgIw6vMsef8W6tynOTkxsjd6o_OQDwGjLdbpoMD8ue8,495
sqlalchemy/inspection.py,sha256=MF-LE358wZDUEl1IH8-Uwt2HI65EsQpQW5o5udHkZwA,5063
sqlalchemy/log.py,sha256=8x9UR3nj0uFm6or6bQF-JWb4fYv2zOeQjG_w-0wOJFA,8607
sqlalchemy/orm/__init__.py,sha256=ZYys5nL3RFUDCMOLFDBrRI52F6er3S1U1OY9TeORuKs,8463
sqlalchemy/orm/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/_orm_constructors.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/_typing.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/attributes.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/base.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/bulk_persistence.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/clsregistry.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/collections.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/context.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/decl_api.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/decl_base.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/dependency.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/descriptor_props.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/dynamic.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/evaluator.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/events.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/exc.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/identity.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/instrumentation.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/interfaces.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/loading.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/mapped_collection.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/mapper.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/path_registry.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/persistence.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/properties.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/query.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/relationships.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/scoping.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/session.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/state.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/sync.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/util.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc,,
sqlalchemy/orm/_orm_constructors.py,sha256=8EQfYsDL2k_ev0eK-wxMl3algouczN38Gu43CrRlAlo,103434
sqlalchemy/orm/_typing.py,sha256=DVBfpHmDVK4x1zxaGJPY2GoTrAsyR6uexv20Lzf1afc,4973
sqlalchemy/orm/attributes.py,sha256=lorOHBJvJJYndOuafWJhHBbQ1pR6FAyimhqz-mErBRQ,92534
sqlalchemy/orm/base.py,sha256=FXkYTSCDUJFQSB5pcyPt2wG-dRctf5P6ySjyjVxQsX0,27502
sqlalchemy/orm/bulk_persistence.py,sha256=1FC23bRJKjpfbp2D5hYuV1qOVIKGSswu9XPXbbSJ5Mo,72663
sqlalchemy/orm/clsregistry.py,sha256=IjoDZwWpjG42ji59L4M1EZvjBEoXPZykzENDtKWxU8A,17974
sqlalchemy/orm/collections.py,sha256=WEKuUCRgLhDhJEIBhZ21UrE0pBOyRm2zxD20GvbgA9g,52243
sqlalchemy/orm/context.py,sha256=FMPyw07OA9OXWQ32RQx52AEa2xTLSkqdYgx9R_yN1x0,112955
sqlalchemy/orm/decl_api.py,sha256=_WPKQ_vSE5k2TLtNmkaxxYmvbhZvkRMrrvCeDxdqDQE,63998
sqlalchemy/orm/decl_base.py,sha256=8R7go5sULTYNRlhYiEjXIJkQ34oPp7DY_fC2nS5D5is,83343
sqlalchemy/orm/dependency.py,sha256=hgjksUWhgbmgHK5GdJdiDCBgDAIGQXIrY-Tj79tbL2k,47631
sqlalchemy/orm/descriptor_props.py,sha256=dR_h4Gvdtpcdp4sj_ZOR4P5Nng2J2vhsvFHouRLlntc,37244
sqlalchemy/orm/dynamic.py,sha256=rWAZ-nfAkREuNjt8e_FRdqYrvHDdbODn1CcfyP8Y18k,9816
sqlalchemy/orm/evaluator.py,sha256=tRETz4dNZ71VsEA8nG0hpefByB-W0zBt02IxcSR5H2g,12353
sqlalchemy/orm/events.py,sha256=1PiGT7JMUWTDAb3X1T79P02BMVDmcWEpatz1FwpLqoA,127777
sqlalchemy/orm/exc.py,sha256=IP40P-wOeXhkYk0YizuTC3wqm6W9cPTaQU08f5MMaQ0,7413
sqlalchemy/orm/identity.py,sha256=jHdCxCpCyda_8mFOfGmN_Pr0XZdKiU-2hFZshlNxbHs,9249
sqlalchemy/orm/instrumentation.py,sha256=M-kZmkUvHUxtf-0mCA8RIM5QmMH1hWlYR_pKMwaidjA,24321
sqlalchemy/orm/interfaces.py,sha256=7Lni4Cue41b1CsmN4VbeUyWwzuNMcKtkrpihc9U-WIw,48690
sqlalchemy/orm/loading.py,sha256=9RacpzFOWbuKgPRWHFmyIvD4fYCLAnkpwBFASyQ2CoI,58277
sqlalchemy/orm/mapped_collection.py,sha256=zK3d3iozORzDruBUrAmkVC0RR3Orj5szk-TSQ24xzIU,19682
sqlalchemy/orm/mapper.py,sha256=W-srpoEc3UIYv_6qTXTd_dG_TVeQcToG77VGrXt85PM,171738
sqlalchemy/orm/path_registry.py,sha256=sJZMv_WPqUpHfQtKWaX3WYFeKBcNJ8C3wOM2mkBGkTE,25920
sqlalchemy/orm/persistence.py,sha256=dzyB2JOXNwQgaCbN8kh0sEz00WFePr48qf8NWVCUZH8,61701
sqlalchemy/orm/properties.py,sha256=eDPFzxYUgdM3uWjHywnb1XW-i0tVKKyx7A2MCD31GQU,29306
sqlalchemy/orm/query.py,sha256=Cf0e94-u1XyoXJoOAmr4iFvtCwNY98kxUYyMPenaWTE,117708
sqlalchemy/orm/relationships.py,sha256=dS5SY0v1MiD7iCNnAQlHaI6prUQhL5EkXT7ijc8FR8E,128644
sqlalchemy/orm/scoping.py,sha256=rJVc7_Lic4V00HZ-UvYFWkVpXqdrMayRmIs4fIwH1UA,78688
sqlalchemy/orm/session.py,sha256=CZJTQ-wPwIy0c3AMFxgJnBgaft6eEf4JzcCLcaaCSjg,195979
sqlalchemy/orm/state.py,sha256=327-F4TG29s6mLC8oWRiO2PuvYIUZzY1MqUPjtUy7M4,37670
sqlalchemy/orm/state_changes.py,sha256=qKYg7NxwrDkuUY3EPygAztym6oAVUFcP2wXn7QD3Mz4,6815
sqlalchemy/orm/strategies.py,sha256=-tsBRsmEqkaxAAIn4t2F-U5SrRIPoPCyzpqFYGTAwNs,119866
sqlalchemy/orm/strategy_options.py,sha256=oeDl_rMDNAC_90N7ytsni-psXWAeQMhABQFyKBSmai0,85353
sqlalchemy/orm/sync.py,sha256=g7iZfSge1HgxMk9SKRgUgtHEbpbZ1kP_CBqOIdTOXqc,5779
sqlalchemy/orm/unitofwork.py,sha256=fiVaqcymbDDHRa1NjS90N9Z466nd5pkJOEi1dHO6QLY,27033
sqlalchemy/orm/util.py,sha256=5SC4MOVU0cPObexDjpMvXvetueiU5pze42raL94gj24,81021
sqlalchemy/orm/writeonly.py,sha256=SYu2sAaHZONk2pW4PmtE871LG-O0P_bjidvKzY1H_zI,22305
sqlalchemy/pool/__init__.py,sha256=qiDdq4r4FFAoDrK6ncugF_i6usi_X1LeJt-CuBHey0s,1804
sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/pool/__pycache__/base.cpython-312.pyc,,
sqlalchemy/pool/__pycache__/events.cpython-312.pyc,,
sqlalchemy/pool/__pycache__/impl.cpython-312.pyc,,
sqlalchemy/pool/base.py,sha256=WF4az4ZKuzQGuKeSJeyexaYjmWZUvYdC6KIi8zTGodw,52236
sqlalchemy/pool/events.py,sha256=xGjkIUZl490ZDtCHqnQF9ZCwe2Jv93eGXmnQxftB11E,13147
sqlalchemy/pool/impl.py,sha256=JwpALSkH-pCoO_6oENbkHYY00Jx9nlttyoI61LivRNc,18944
sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/schema.py,sha256=dKiWmgHYjcKQ4TiiD6vD0UMmIsD8u0Fsor1M9AAeGUs,3194
sqlalchemy/sql/__init__.py,sha256=UNa9EUiYWoPayf-FzNcwVgQvpsBdInPZfpJesAStN9o,5820
sqlalchemy/sql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_dml_constructors.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_elements_constructors.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_orm_types.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_py_util.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_typing.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/annotation.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/cache_key.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/coercions.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/compiler.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/crud.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/ddl.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/default_comparator.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/elements.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/events.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/expression.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/functions.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/lambdas.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/naming.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/operators.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/roles.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/schema.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/selectable.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/sqltypes.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/traversals.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/type_api.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/util.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/visitors.cpython-312.pyc,,
sqlalchemy/sql/_dml_constructors.py,sha256=YdBJex0MCVACv4q2nl_ii3uhxzwU6aDB8zAsratX5UQ,3867
sqlalchemy/sql/_elements_constructors.py,sha256=833Flez92odZkE2Vy6SXK8LcoO1AwkfVzOnATJLWFsA,63168
sqlalchemy/sql/_orm_types.py,sha256=T-vjcry4C1y0GToFKVxQCnmly_-Zsq4IO4SHN6bvUF4,625
sqlalchemy/sql/_py_util.py,sha256=hiM9ePbRSGs60bAMxPFuJCIC_p9SQ1VzqXGiPchiYwE,2173
sqlalchemy/sql/_selectable_constructors.py,sha256=wjE6HrLm9cR7bxvZXT8sFLUqT6t_J9G1XyQCnYmBDl0,18780
sqlalchemy/sql/_typing.py,sha256=oqwrYHVMtK-AuKGH9c4SgfiOEJUt5vjkzSEzzscMHkM,12771
sqlalchemy/sql/annotation.py,sha256=aqbbVz9kfbCT3_66CZ9GEirVN197Cukoqt8rq48FgkQ,18245
sqlalchemy/sql/base.py,sha256=M1b-Tg49ikUW2mnZv0aI38oASG6dgeo4jBNWDgJgAg8,73925
sqlalchemy/sql/cache_key.py,sha256=0Db8mR8IrpBgdzXs4TGTt98LOpL3c7KABd72MAPKUQQ,33668
sqlalchemy/sql/coercions.py,sha256=hAEou9Ycyswzu8yz_Q7QkwL2_c3nctzBJQS2oDEr4iE,40664
sqlalchemy/sql/compiler.py,sha256=hrTptbOKIgVIHapywj4Lk5OMwpXvHS-KGg3odFwlo-I,274687
sqlalchemy/sql/crud.py,sha256=HBX4QPtW_PYYJmIKfNr-wE8IdEr963N24WXzFBUZOo0,56514
sqlalchemy/sql/ddl.py,sha256=lKqvOigbcYrDG0euxd5F4tu9HbBi1kmp3eFPc45HH-8,45636
sqlalchemy/sql/default_comparator.py,sha256=utXWsZVGEjflhFfCT4ywa6RnhORc1Rryo87Hga71Rps,16707
sqlalchemy/sql/dml.py,sha256=pn0Lm1ofC5qVZzwGWFW73lPCiNba8OsTeemurJgwRyg,65614
sqlalchemy/sql/elements.py,sha256=YfccXzQc9DlgF8q15kDf-zKBUY_vpIe0FGaVDBPoic4,176544
sqlalchemy/sql/events.py,sha256=iC_Q1Htm1Aobt5tOYxWfHHqNpoytrULORmUKcusH_-E,18290
sqlalchemy/sql/expression.py,sha256=VMX-dLpsZYnVRJpYNDozDUgaj7iQ0HuewUKVefD57PE,7586
sqlalchemy/sql/functions.py,sha256=kMMYplvuIHFAPwxBI03SizwaLcYEHzysecWk-R1V-JM,63762
sqlalchemy/sql/lambdas.py,sha256=DP0Qz7Ypo8QhzMwygGHYgRhwJMx-rNezO1euouH3iYU,49292
sqlalchemy/sql/naming.py,sha256=ZHs1qSV3ou8TYmZ92uvU3sfdklUQlIz4uhe330n05SU,6858
sqlalchemy/sql/operators.py,sha256=himArRqBzrljob3Zfhi_ZS-Jleg1u6YFp0g3d7Co6IM,76106
sqlalchemy/sql/roles.py,sha256=pOsVn_OZD7mF2gJByHf24Rjopt0_Hu3dUCEOK5t4KS8,7662
sqlalchemy/sql/schema.py,sha256=iFleWHkxi-3mKGiK_N1TzUqxnNwOpypB4bWDuAVQe8c,229717
sqlalchemy/sql/selectable.py,sha256=cgyV0AsPy4CXAFdhMiTCkbgaHiFilW9sclzxlHJKH3o,236460
sqlalchemy/sql/sqltypes.py,sha256=5_N9MhprQFWYc3yjcXgFC_DmvkQU-Jz-Ok9nIMYp2Q4,127469
sqlalchemy/sql/traversals.py,sha256=3ScTC1fh1-y8Y478h_2Azmd2xdQdWPWkDve4YgrwMf8,33664
sqlalchemy/sql/type_api.py,sha256=SN16_oNZG6G65cvG6ABPcptz_YV5vfB2fknwJZxrkOs,84464
sqlalchemy/sql/util.py,sha256=qGHQF-tPCj-m1FBerzT7weCanGcXU7dK5m-W7NHio-4,48077
sqlalchemy/sql/visitors.py,sha256=71wdVvhhZL4nJvVwFAs6ssaW-qZgNRSmKjpAcOzF_TA,36317
sqlalchemy/testing/__init__.py,sha256=zgitAYzsCWT_U48ZiifXHHLJFo8nZBYmI-5TueA4_lE,3160
sqlalchemy/testing/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/assertions.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/assertsql.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/asyncio.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/config.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/engines.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/entities.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/exclusions.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/pickleable.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/profiling.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/requirements.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/schema.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/util.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/warnings.cpython-312.pyc,,
sqlalchemy/testing/assertions.py,sha256=gL0rA7CCZJbcVgvWOPV91tTZTRwQc1_Ta0-ykBn83Ew,31439
sqlalchemy/testing/assertsql.py,sha256=IgQG7l94WaiRP8nTbilJh1ZHZl125g7GPq-S5kmQZN0,16817
sqlalchemy/testing/asyncio.py,sha256=kM8uuOqDBagZF0r9xvGmsiirUVLUQ_KBzjUFU67W-b8,3830
sqlalchemy/testing/config.py,sha256=AqyH1qub_gDqX0BvlL-JBQe7N-t2wo8655FtwblUNOY,12090
sqlalchemy/testing/engines.py,sha256=HFJceEBD3Q_TTFQMTtIV5wGWO_a7oUgoKtUF_z636SM,13481
sqlalchemy/testing/entities.py,sha256=IphFegPKbff3Un47jY6bi7_MQXy6qkx_50jX2tHZJR4,3354
sqlalchemy/testing/exclusions.py,sha256=T8B01hmm8WVs-EKcUOQRzabahPqblWJfOidi6bHJ6GA,12460
sqlalchemy/testing/fixtures/__init__.py,sha256=dMClrIoxqlYIFpk2ia4RZpkbfxsS_3EBigr9QsPJ66g,1198
sqlalchemy/testing/fixtures/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/base.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/mypy.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/orm.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/sql.cpython-312.pyc,,
sqlalchemy/testing/fixtures/base.py,sha256=9r_J2ksiTzClpUxW0TczICHrWR7Ny8PV8IsBz6TsGFI,12256
sqlalchemy/testing/fixtures/mypy.py,sha256=gdxiwNFIzDlNGSOdvM3gbwDceVCC9t8oM5kKbwyhGBk,11973
sqlalchemy/testing/fixtures/orm.py,sha256=8EFbnaBbXX_Bf4FcCzBUaAHgyVpsLGBHX16SGLqE3Fg,6095
sqlalchemy/testing/fixtures/sql.py,sha256=KZMjco9_3dsuspmkew5Ejp88Wlr9PsSBB1qeJGFxQAk,15900
sqlalchemy/testing/pickleable.py,sha256=U9mIqk-zaxq9Xfy7HErP7UrKgTov-A3QFnhZh-NiOjI,2833
sqlalchemy/testing/plugin/__init__.py,sha256=79F--BIY_NTBzVRIlJGgAY5LNJJ3cD19XvrAo4X0W9A,247
sqlalchemy/testing/plugin/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,,
sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-312.pyc,,
sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-312.pyc,,
sqlalchemy/testing/plugin/bootstrap.py,sha256=oYScMbEW4pCnWlPEAq1insFruCXFQeEVBwo__i4McpU,1685
sqlalchemy/testing/plugin/plugin_base.py,sha256=BgNzWNEmgpK4CwhyblQQKnH-7FDKVi_Uul5vw8fFjBU,21578
sqlalchemy/testing/plugin/pytestplugin.py,sha256=6jkQHH2VQMD75k2As9CuWXmEy9jrscoFRhCNg6-PaTw,27656
sqlalchemy/testing/profiling.py,sha256=PbuPhRFbauFilUONeY3tV_Y_5lBkD7iCa8VVyH2Sk9Y,10148
sqlalchemy/testing/provision.py,sha256=3qFor_sN1FFlS7odUGkKqLUxGmQZC9XM67I9vQ_zeXo,14626
sqlalchemy/testing/requirements.py,sha256=Z__o-1Rj9B7dI8E_l3qsKTvsg0rK198vB0A1p7A5dcM,52832
sqlalchemy/testing/schema.py,sha256=lr4GkGrGwagaHMuSGzWdzkMaj3HnS7dgfLLWfxt__-U,6513
sqlalchemy/testing/suite/__init__.py,sha256=Y5DRNG0Yl1u3ypt9zVF0Z9suPZeuO_UQGLl-wRgvTjU,722
sqlalchemy/testing/suite/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_cte.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_insert.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_results.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_select.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_types.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-312.pyc,,
sqlalchemy/testing/suite/test_cte.py,sha256=6zBC3W2OwX1Xs-HedzchcKN2S7EaLNkgkvV_JSZ_Pq0,6451
sqlalchemy/testing/suite/test_ddl.py,sha256=1Npkf0C_4UNxphthAGjG078n0vPEgnSIHpDu5MfokxQ,12031
sqlalchemy/testing/suite/test_deprecations.py,sha256=BcJxZTcjYqeOAENVElCg3hVvU6fkGEW3KGBMfnW8bng,5337
sqlalchemy/testing/suite/test_dialect.py,sha256=EH4ZQWbnGdtjmx5amZtTyhYmrkXJCvW1SQoLahoE7uk,22923
sqlalchemy/testing/suite/test_insert.py,sha256=9azifj6-OCD7s8h_tAO1uPw100ibQv8YoKc_VA3hn3c,18824
sqlalchemy/testing/suite/test_reflection.py,sha256=7sML8-owubSQeEM7Ve6LbnB8uIVlNV00WWepKwII2a8,109648
sqlalchemy/testing/suite/test_results.py,sha256=X720GafdA4p75SOGS93j-dXkt6QDEnnJbU2bh18VCcg,16914
sqlalchemy/testing/suite/test_rowcount.py,sha256=3KDTlRgjpQ1OVfp__1cv8Hvq4CsDKzmrhJQ_WIJWoJg,7900
sqlalchemy/testing/suite/test_select.py,sha256=ulRZQJlzkwwcewEyisuBEXVWFR0Wshz9MEDxYYiYLwQ,61732
sqlalchemy/testing/suite/test_sequence.py,sha256=66bCoy4xo99GBSaX6Hxb88foANAykLGRz1YEKbvpfuA,9923
sqlalchemy/testing/suite/test_types.py,sha256=K4MGHvnTtgqeksoQOBCZRVQYC7HoYO6Z6rVt5vj2t9o,67805
sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=c3_eIxLyORuSOhNDP0jWKxPyUf3SwMFpdalxtquwqlM,6141
sqlalchemy/testing/suite/test_update_delete.py,sha256=yTiM2unnfOK9rK8ZkqeTTU_MkT-RsKFLmdYliniZfAY,3994
sqlalchemy/testing/util.py,sha256=qldXKw8gRJ4I2x3uXsBssYMqwatmcMFMTOveRQCmfDU,14469
sqlalchemy/testing/warnings.py,sha256=fJ-QJUY2zY2PPxZJKv9medW-BKKbCNbA4Ns_V3YwFXM,1546
sqlalchemy/types.py,sha256=cQFM-hFRmaf1GErun1qqgEs6QxufvzMuwKqj9tuMPpE,3168
sqlalchemy/util/__init__.py,sha256=5D5Mquvx3SOmud0QErKzzGvBTkqMdhrrd_sXijOILeo,8312
sqlalchemy/util/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_collections.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_has_cy.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_py_collections.cpython-312.pyc,,
sqlalchemy/util/__pycache__/compat.cpython-312.pyc,,
sqlalchemy/util/__pycache__/concurrency.cpython-312.pyc,,
sqlalchemy/util/__pycache__/deprecations.cpython-312.pyc,,
sqlalchemy/util/__pycache__/langhelpers.cpython-312.pyc,,
sqlalchemy/util/__pycache__/preloaded.cpython-312.pyc,,
sqlalchemy/util/__pycache__/queue.cpython-312.pyc,,
sqlalchemy/util/__pycache__/tool_support.cpython-312.pyc,,
sqlalchemy/util/__pycache__/topological.cpython-312.pyc,,
sqlalchemy/util/__pycache__/typing.cpython-312.pyc,,
sqlalchemy/util/_collections.py,sha256=aZoSAVOXnHBoYEsxDOi0O9odg9wqLbGb7PGjaWQKiyY,20078
sqlalchemy/util/_concurrency_py3k.py,sha256=zb0Bow2Y_QjTdaACEviBEEaFvqDuVvpJfmwCjaw8xNE,9170
sqlalchemy/util/_has_cy.py,sha256=wCQmeSjT3jaH_oxfCEtGk-1g0gbSpt5MCK5UcWdMWqk,1247
sqlalchemy/util/_py_collections.py,sha256=U6L5AoyLdgSv7cdqB4xxQbw1rpeJjyOZVXffgxgga8I,16714
sqlalchemy/util/compat.py,sha256=cnucBQOKspo58vjRpQXUBrHGguHOSFvftpD-I8vfUy0,8760
sqlalchemy/util/concurrency.py,sha256=9lT_cMoO1fZNdY8QTUZ22oeSf-L5I-79Ke7chcBNPA0,3304
sqlalchemy/util/deprecations.py,sha256=YBwvvYhSB8LhasIZRKvg_-WNoVhPUcaYI1ZrnjDn868,11971
sqlalchemy/util/langhelpers.py,sha256=uIK3szZuq9aMnO-vEpSlNekNWv4I-E391e56bkTnUm0,65090
sqlalchemy/util/preloaded.py,sha256=az7NmLJLsqs0mtM9uBkIu10-841RYDq8wOyqJ7xXvqE,5904
sqlalchemy/util/queue.py,sha256=CaeSEaYZ57YwtmLdNdOIjT5PK_LCuwMFiO0mpp39ybM,10185
sqlalchemy/util/tool_support.py,sha256=9braZyidaiNrZVsWtGmkSmus50-byhuYrlAqvhjcmnA,6135
sqlalchemy/util/topological.py,sha256=N3M3Le7KzGHCmqPGg0ZBqixTDGwmFLhOZvBtc4rHL_g,3458
sqlalchemy/util/typing.py,sha256=lFcGo1dJbZIZ9drAnvef-PzP0cX4LMxMSwgk3lJBb0g,18182

@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: setuptools (75.1.0)
Root-Is-Purelib: false
Tag: cp312-cp312-manylinux_2_17_x86_64
Tag: cp312-cp312-manylinux2014_x86_64

@ -0,0 +1,20 @@
Copyright 2010 Jason Kirtland
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,60 @@
Metadata-Version: 2.3
Name: blinker
Version: 1.9.0
Summary: Fast, simple object-to-object and broadcast signaling
Author: Jason Kirtland
Maintainer-email: Pallets Ecosystem <contact@palletsprojects.com>
Requires-Python: >=3.9
Description-Content-Type: text/markdown
Classifier: Development Status :: 5 - Production/Stable
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python
Classifier: Typing :: Typed
Project-URL: Chat, https://discord.gg/pallets
Project-URL: Documentation, https://blinker.readthedocs.io
Project-URL: Source, https://github.com/pallets-eco/blinker/
# Blinker
Blinker provides a fast dispatching system that allows any number of
interested parties to subscribe to events, or "signals".
## Pallets Community Ecosystem
> [!IMPORTANT]\
> This project is part of the Pallets Community Ecosystem. Pallets is the open
> source organization that maintains Flask; Pallets-Eco enables community
> maintenance of related projects. If you are interested in helping maintain
> this project, please reach out on [the Pallets Discord server][discord].
>
> [discord]: https://discord.gg/pallets
## Example
Signal receivers can subscribe to specific senders or receive signals
sent by any sender.
```pycon
>>> from blinker import signal
>>> started = signal('round-started')
>>> def each(round):
...     print(f"Round {round}!")
...
>>> started.connect(each)
>>> def round_two(round):
... print("This is round two.")
...
>>> started.connect(round_two, sender=2)
>>> for round in range(1, 4):
... started.send(round)
...
Round 1!
Round 2!
This is round two.
Round 3!
```

@ -0,0 +1,12 @@
blinker-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
blinker-1.9.0.dist-info/LICENSE.txt,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054
blinker-1.9.0.dist-info/METADATA,sha256=uIRiM8wjjbHkCtbCyTvctU37IAZk0kEe5kxAld1dvzA,1633
blinker-1.9.0.dist-info/RECORD,,
blinker-1.9.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
blinker/__init__.py,sha256=I2EdZqpy4LyjX17Hn1yzJGWCjeLaVaPzsMgHkLfj_cQ,317
blinker/__pycache__/__init__.cpython-312.pyc,,
blinker/__pycache__/_utilities.cpython-312.pyc,,
blinker/__pycache__/base.cpython-312.pyc,,
blinker/_utilities.py,sha256=0J7eeXXTUx0Ivf8asfpx0ycVkp0Eqfqnj117x2mYX9E,1675
blinker/base.py,sha256=QpDuvXXcwJF49lUBcH5BiST46Rz9wSG7VW_p7N_027M,19132
blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: flit 3.10.1
Root-Is-Purelib: true
Tag: py3-none-any

@ -0,0 +1,17 @@
from __future__ import annotations
from .base import ANY
from .base import default_namespace
from .base import NamedSignal
from .base import Namespace
from .base import Signal
from .base import signal
__all__ = [
"ANY",
"default_namespace",
"NamedSignal",
"Namespace",
"Signal",
"signal",
]

@ -0,0 +1,64 @@
from __future__ import annotations
import collections.abc as c
import inspect
import typing as t
from weakref import ref
from weakref import WeakMethod
T = t.TypeVar("T")
class Symbol:
"""A constant symbol, nicer than ``object()``. Repeated calls return the
same instance.
>>> Symbol('foo') is Symbol('foo')
True
>>> Symbol('foo')
foo
"""
symbols: t.ClassVar[dict[str, Symbol]] = {}
def __new__(cls, name: str) -> Symbol:
if name in cls.symbols:
return cls.symbols[name]
obj = super().__new__(cls)
cls.symbols[name] = obj
return obj
def __init__(self, name: str) -> None:
self.name = name
def __repr__(self) -> str:
return self.name
def __getnewargs__(self) -> tuple[t.Any, ...]:
return (self.name,)
def make_id(obj: object) -> c.Hashable:
"""Get a stable identifier for a receiver or sender, to be used as a dict
key or in a set.
"""
if inspect.ismethod(obj):
# The id of a bound method is not stable, but the id of the unbound
# function and instance are.
return id(obj.__func__), id(obj.__self__)
if isinstance(obj, (str, int)):
# Instances with the same value always compare equal and have the same
# hash, even if the id may change.
return obj
# Assume other types are not hashable but will always be the same instance.
return id(obj)
def make_ref(obj: T, callback: c.Callable[[ref[T]], None] | None = None) -> ref[T]:
if inspect.ismethod(obj):
return WeakMethod(obj, callback) # type: ignore[arg-type, return-value]
return ref(obj, callback)

@ -0,0 +1,512 @@
from __future__ import annotations
import collections.abc as c
import sys
import typing as t
import weakref
from collections import defaultdict
from contextlib import contextmanager
from functools import cached_property
from inspect import iscoroutinefunction
from ._utilities import make_id
from ._utilities import make_ref
from ._utilities import Symbol
F = t.TypeVar("F", bound=c.Callable[..., t.Any])
ANY = Symbol("ANY")
"""Symbol for "any sender"."""
ANY_ID = 0
class Signal:
"""A notification emitter.
:param doc: The docstring for the signal.
"""
ANY = ANY
"""An alias for the :data:`~blinker.ANY` sender symbol."""
set_class: type[set[t.Any]] = set
"""The set class to use for tracking connected receivers and senders.
Python's ``set`` is unordered. If receivers must be dispatched in the order
they were connected, an ordered set implementation can be used.
.. versionadded:: 1.7
"""
@cached_property
def receiver_connected(self) -> Signal:
"""Emitted at the end of each :meth:`connect` call.
The signal sender is the signal instance, and the :meth:`connect`
arguments are passed through: ``receiver``, ``sender``, and ``weak``.
.. versionadded:: 1.2
"""
return Signal(doc="Emitted after a receiver connects.")
@cached_property
def receiver_disconnected(self) -> Signal:
"""Emitted at the end of each :meth:`disconnect` call.
The sender is the signal instance, and the :meth:`disconnect` arguments
are passed through: ``receiver`` and ``sender``.
This signal is emitted **only** when :meth:`disconnect` is called
explicitly. This signal cannot be emitted by an automatic disconnect
when a weakly referenced receiver or sender goes out of scope, as the
instance is no longer available to be used as the sender for this
signal.
An alternative approach is available by subscribing to
:attr:`receiver_connected` and setting up a custom weakref cleanup
callback on weak receivers and senders.
.. versionadded:: 1.2
"""
return Signal(doc="Emitted after a receiver disconnects.")
def __init__(self, doc: str | None = None) -> None:
if doc:
self.__doc__ = doc
self.receivers: dict[
t.Any, weakref.ref[c.Callable[..., t.Any]] | c.Callable[..., t.Any]
] = {}
"""The map of connected receivers. Useful to quickly check if any
receivers are connected to the signal: ``if s.receivers:``. The
structure and data is not part of the public API, but checking its
boolean value is.
"""
self.is_muted: bool = False
self._by_receiver: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
self._by_sender: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
self._weak_senders: dict[t.Any, weakref.ref[t.Any]] = {}
def connect(self, receiver: F, sender: t.Any = ANY, weak: bool = True) -> F:
"""Connect ``receiver`` to be called when the signal is sent by
``sender``.
:param receiver: The callable to call when :meth:`send` is called with
the given ``sender``, passing ``sender`` as a positional argument
along with any extra keyword arguments.
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
called when :meth:`send` is called with this sender. If ``ANY``, the
receiver will be called for any sender. A receiver may be connected
to multiple senders by calling :meth:`connect` multiple times.
:param weak: Track the receiver with a :mod:`weakref`. The receiver will
be automatically disconnected when it is garbage collected. When
connecting a receiver defined within a function, set to ``False``,
otherwise it will be disconnected when the function scope ends.
"""
receiver_id = make_id(receiver)
sender_id = ANY_ID if sender is ANY else make_id(sender)
if weak:
self.receivers[receiver_id] = make_ref(
receiver, self._make_cleanup_receiver(receiver_id)
)
else:
self.receivers[receiver_id] = receiver
self._by_sender[sender_id].add(receiver_id)
self._by_receiver[receiver_id].add(sender_id)
if sender is not ANY and sender_id not in self._weak_senders:
# store a cleanup for weakref-able senders
try:
self._weak_senders[sender_id] = make_ref(
sender, self._make_cleanup_sender(sender_id)
)
except TypeError:
pass
if "receiver_connected" in self.__dict__ and self.receiver_connected.receivers:
try:
self.receiver_connected.send(
self, receiver=receiver, sender=sender, weak=weak
)
except TypeError:
# TODO no explanation or test for this
self.disconnect(receiver, sender)
raise
return receiver
def connect_via(self, sender: t.Any, weak: bool = False) -> c.Callable[[F], F]:
"""Connect the decorated function to be called when the signal is sent
by ``sender``.
The decorated function will be called when :meth:`send` is called with
the given ``sender``, passing ``sender`` as a positional argument along
with any extra keyword arguments.
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
called when :meth:`send` is called with this sender. If ``ANY``, the
receiver will be called for any sender. A receiver may be connected
to multiple senders by calling :meth:`connect` multiple times.
:param weak: Track the receiver with a :mod:`weakref`. The receiver will
be automatically disconnected when it is garbage collected. When
connecting a receiver defined within a function, set to ``False``,
otherwise it will be disconnected when the function scope ends.
.. versionadded:: 1.1
"""
def decorator(fn: F) -> F:
self.connect(fn, sender, weak)
return fn
return decorator
@contextmanager
def connected_to(
self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY
) -> c.Generator[None, None, None]:
"""A context manager that temporarily connects ``receiver`` to the
signal while a ``with`` block executes. When the block exits, the
receiver is disconnected. Useful for tests.
:param receiver: The callable to call when :meth:`send` is called with
the given ``sender``, passing ``sender`` as a positional argument
along with any extra keyword arguments.
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
called when :meth:`send` is called with this sender. If ``ANY``, the
receiver will be called for any sender.
.. versionadded:: 1.1
"""
self.connect(receiver, sender=sender, weak=False)
try:
yield None
finally:
self.disconnect(receiver)
@contextmanager
def muted(self) -> c.Generator[None, None, None]:
"""A context manager that temporarily disables the signal. No receivers
will be called if the signal is sent, until the ``with`` block exits.
Useful for tests.
"""
self.is_muted = True
try:
yield None
finally:
self.is_muted = False
def send(
self,
sender: t.Any | None = None,
/,
*,
_async_wrapper: c.Callable[
[c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]], c.Callable[..., t.Any]
]
| None = None,
**kwargs: t.Any,
) -> list[tuple[c.Callable[..., t.Any], t.Any]]:
"""Call all receivers that are connected to the given ``sender``
or :data:`ANY`. Each receiver is called with ``sender`` as a positional
argument along with any extra keyword arguments. Return a list of
``(receiver, return value)`` tuples.
The order receivers are called is undefined, but can be influenced by
setting :attr:`set_class`.
If a receiver raises an exception, that exception will propagate up.
This makes debugging straightforward, with an assumption that correctly
implemented receivers will not raise.
:param sender: Call receivers connected to this sender, in addition to
those connected to :data:`ANY`.
:param _async_wrapper: Will be called on any receivers that are async
coroutines to turn them into sync callables. For example, could run
the receiver with an event loop.
:param kwargs: Extra keyword arguments to pass to each receiver.
.. versionchanged:: 1.7
Added the ``_async_wrapper`` argument.
"""
if self.is_muted:
return []
results = []
for receiver in self.receivers_for(sender):
if iscoroutinefunction(receiver):
if _async_wrapper is None:
raise RuntimeError("Cannot send to a coroutine function.")
result = _async_wrapper(receiver)(sender, **kwargs)
else:
result = receiver(sender, **kwargs)
results.append((receiver, result))
return results
async def send_async(
self,
sender: t.Any | None = None,
/,
*,
_sync_wrapper: c.Callable[
[c.Callable[..., t.Any]], c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]
]
| None = None,
**kwargs: t.Any,
) -> list[tuple[c.Callable[..., t.Any], t.Any]]:
"""Await all receivers that are connected to the given ``sender``
or :data:`ANY`. Each receiver is called with ``sender`` as a positional
argument along with any extra keyword arguments. Return a list of
``(receiver, return value)`` tuples.
The order receivers are called is undefined, but can be influenced by
setting :attr:`set_class`.
If a receiver raises an exception, that exception will propagate up.
This makes debugging straightforward, with an assumption that correctly
implemented receivers will not raise.
:param sender: Call receivers connected to this sender, in addition to
those connected to :data:`ANY`.
:param _sync_wrapper: Will be called on any receivers that are sync
callables to turn them into async coroutines. For example,
could call the receiver in a thread.
:param kwargs: Extra keyword arguments to pass to each receiver.
.. versionadded:: 1.7
"""
if self.is_muted:
return []
results = []
for receiver in self.receivers_for(sender):
if not iscoroutinefunction(receiver):
if _sync_wrapper is None:
raise RuntimeError("Cannot send to a non-coroutine function.")
result = await _sync_wrapper(receiver)(sender, **kwargs)
else:
result = await receiver(sender, **kwargs)
results.append((receiver, result))
return results
def has_receivers_for(self, sender: t.Any) -> bool:
"""Check if there is at least one receiver that will be called with the
given ``sender``. A receiver connected to :data:`ANY` will always be
called, regardless of sender. Does not check if weakly referenced
receivers are still live. See :meth:`receivers_for` for a stronger
search.
:param sender: Check for receivers connected to this sender, in addition
to those connected to :data:`ANY`.
"""
if not self.receivers:
return False
if self._by_sender[ANY_ID]:
return True
if sender is ANY:
return False
return make_id(sender) in self._by_sender
def receivers_for(
self, sender: t.Any
) -> c.Generator[c.Callable[..., t.Any], None, None]:
"""Yield each receiver to be called for ``sender``, in addition to those
to be called for :data:`ANY`. Weakly referenced receivers that are not
live will be disconnected and skipped.
:param sender: Yield receivers connected to this sender, in addition
to those connected to :data:`ANY`.
"""
# TODO: test receivers_for(ANY)
if not self.receivers:
return
sender_id = make_id(sender)
if sender_id in self._by_sender:
ids = self._by_sender[ANY_ID] | self._by_sender[sender_id]
else:
ids = self._by_sender[ANY_ID].copy()
for receiver_id in ids:
receiver = self.receivers.get(receiver_id)
if receiver is None:
continue
if isinstance(receiver, weakref.ref):
strong = receiver()
if strong is None:
self._disconnect(receiver_id, ANY_ID)
continue
yield strong
else:
yield receiver
def disconnect(self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY) -> None:
"""Disconnect ``receiver`` from being called when the signal is sent by
``sender``.
:param receiver: A connected receiver callable.
:param sender: Disconnect from only this sender. By default, disconnect
from all senders.
"""
sender_id: c.Hashable
if sender is ANY:
sender_id = ANY_ID
else:
sender_id = make_id(sender)
receiver_id = make_id(receiver)
self._disconnect(receiver_id, sender_id)
if (
"receiver_disconnected" in self.__dict__
and self.receiver_disconnected.receivers
):
self.receiver_disconnected.send(self, receiver=receiver, sender=sender)
def _disconnect(self, receiver_id: c.Hashable, sender_id: c.Hashable) -> None:
if sender_id == ANY_ID:
if self._by_receiver.pop(receiver_id, None) is not None:
for bucket in self._by_sender.values():
bucket.discard(receiver_id)
self.receivers.pop(receiver_id, None)
else:
self._by_sender[sender_id].discard(receiver_id)
self._by_receiver[receiver_id].discard(sender_id)
def _make_cleanup_receiver(
self, receiver_id: c.Hashable
) -> c.Callable[[weakref.ref[c.Callable[..., t.Any]]], None]:
"""Create a callback function to disconnect a weakly referenced
receiver when it is garbage collected.
"""
def cleanup(ref: weakref.ref[c.Callable[..., t.Any]]) -> None:
# If the interpreter is shutting down, disconnecting can result in a
# weird ignored exception. Don't call it in that case.
if not sys.is_finalizing():
self._disconnect(receiver_id, ANY_ID)
return cleanup
def _make_cleanup_sender(
self, sender_id: c.Hashable
) -> c.Callable[[weakref.ref[t.Any]], None]:
"""Create a callback function to disconnect all receivers for a weakly
referenced sender when it is garbage collected.
"""
assert sender_id != ANY_ID
def cleanup(ref: weakref.ref[t.Any]) -> None:
self._weak_senders.pop(sender_id, None)
for receiver_id in self._by_sender.pop(sender_id, ()):
self._by_receiver[receiver_id].discard(sender_id)
return cleanup
def _cleanup_bookkeeping(self) -> None:
"""Prune unused sender/receiver bookkeeping. Not threadsafe.
Connecting & disconnecting leaves behind a small amount of bookkeeping
data. Typical workloads using Blinker, for example in most web apps,
Flask, CLI scripts, etc., are not adversely affected by this
bookkeeping.
With a long-running process performing dynamic signal routing with high
volume, e.g. connecting to function closures, senders are all unique
object instances. Doing all of this over and over may cause memory usage
to grow due to extraneous bookkeeping. (An empty ``set`` for each stale
sender/receiver pair.)
This method will prune that bookkeeping away, with the caveat that such
pruning is not threadsafe. The risk is that cleanup of a fully
disconnected receiver/sender pair occurs while another thread is
connecting that same pair. If you are in the highly dynamic, unique
receiver/sender situation that has led you to this method, that failure
mode is perhaps not a big deal for you.
"""
for mapping in (self._by_sender, self._by_receiver):
for ident, bucket in list(mapping.items()):
if not bucket:
mapping.pop(ident, None)
def _clear_state(self) -> None:
"""Disconnect all receivers and senders. Useful for tests."""
self._weak_senders.clear()
self.receivers.clear()
self._by_sender.clear()
self._by_receiver.clear()
class NamedSignal(Signal):
"""A named generic notification emitter. The name is not used by the signal
itself, but matches the key in the :class:`Namespace` that it belongs to.
:param name: The name of the signal within the namespace.
:param doc: The docstring for the signal.
"""
def __init__(self, name: str, doc: str | None = None) -> None:
super().__init__(doc)
#: The name of this signal.
self.name: str = name
def __repr__(self) -> str:
base = super().__repr__()
return f"{base[:-1]}; {self.name!r}>" # noqa: E702
class Namespace(dict[str, NamedSignal]):
"""A dict mapping names to signals."""
def signal(self, name: str, doc: str | None = None) -> NamedSignal:
"""Return the :class:`NamedSignal` for the given ``name``, creating it
if required. Repeated calls with the same name return the same signal.
:param name: The name of the signal.
:param doc: The docstring of the signal.
"""
if name not in self:
self[name] = NamedSignal(name, doc)
return self[name]
class _PNamespaceSignal(t.Protocol):
def __call__(self, name: str, doc: str | None = None) -> NamedSignal: ...
default_namespace: Namespace = Namespace()
"""A default :class:`Namespace` for creating named signals. :func:`signal`
creates a :class:`NamedSignal` in this namespace.
"""
signal: _PNamespaceSignal = default_namespace.signal
"""Return a :class:`NamedSignal` in :data:`default_namespace` with the given
``name``, creating it if required. Repeated calls with the same name return the
same signal.
"""

@ -0,0 +1,28 @@
Copyright 2018 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -0,0 +1,64 @@
Metadata-Version: 2.1
Name: cachelib
Version: 0.13.0
Summary: A collection of cache libraries in the same API interface.
Home-page: https://github.com/pallets-eco/cachelib/
Maintainer: Pallets
Maintainer-email: contact@palletsprojects.com
License: BSD-3-Clause
Project-URL: Donate, https://palletsprojects.com/donate
Project-URL: Documentation, https://cachelib.readthedocs.io/
Project-URL: Changes, https://cachelib.readthedocs.io/changes/
Project-URL: Source Code, https://github.com/pallets-eco/cachelib/
Project-URL: Issue Tracker, https://github.com/pallets-eco/cachelib/issues/
Project-URL: Twitter, https://twitter.com/PalletsTeam
Project-URL: Chat, https://discord.gg/pallets
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Requires-Python: >=3.8
Description-Content-Type: text/x-rst
License-File: LICENSE.rst
CacheLib
========
A collection of cache libraries in the same API interface. Extracted
from Werkzeug.
Installing
----------
Install and update using `pip`_:
.. code-block:: text
$ pip install -U cachelib
.. _pip: https://pip.pypa.io/en/stable/getting-started/
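Every backend implements the same ``BaseCache`` interface, so switching
backends does not change calling code. A minimal sketch using the in-memory
``SimpleCache`` (the key and value below are only examples):

.. code-block:: python

    from cachelib import SimpleCache

    cache = SimpleCache(default_timeout=300)
    cache.set("answer", 42, timeout=60)   # per-key timeout in seconds
    assert cache.get("answer") == 42
    assert cache.has("answer")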
Donate
------
The Pallets organization develops and supports Flask and the libraries
it uses. In order to grow the community of contributors and users, and
allow the maintainers to devote more time to the projects, `please
donate today`_.
.. _please donate today: https://palletsprojects.com/donate
Links
-----
- Documentation: https://cachelib.readthedocs.io/
- Changes: https://cachelib.readthedocs.io/changes/
- PyPI Releases: https://pypi.org/project/cachelib/
- Source Code: https://github.com/pallets/cachelib/
- Issue Tracker: https://github.com/pallets/cachelib/issues/
- Twitter: https://twitter.com/PalletsTeam
- Chat: https://discord.gg/pallets

@ -0,0 +1,27 @@
cachelib-0.13.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cachelib-0.13.0.dist-info/LICENSE.rst,sha256=zUGBIIEtwmJiga4CfoG2SCKdFmtaynRyzs1RADjTbn0,1475
cachelib-0.13.0.dist-info/METADATA,sha256=lqDsbA03sUETX-dGoDR1uDqqXtyu1m0sS0wBjCDEeXE,1960
cachelib-0.13.0.dist-info/RECORD,,
cachelib-0.13.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
cachelib-0.13.0.dist-info/top_level.txt,sha256=AYC4q8wgGd_hR_F2YcDkmtQm41gv9-5AThKuQtNPEXk,9
cachelib/__init__.py,sha256=xTX_F13-FA58DjBGip4XZeCSlEo6B4c4VTRi49MBywY,575
cachelib/__pycache__/__init__.cpython-312.pyc,,
cachelib/__pycache__/base.cpython-312.pyc,,
cachelib/__pycache__/dynamodb.cpython-312.pyc,,
cachelib/__pycache__/file.cpython-312.pyc,,
cachelib/__pycache__/memcached.cpython-312.pyc,,
cachelib/__pycache__/mongodb.cpython-312.pyc,,
cachelib/__pycache__/redis.cpython-312.pyc,,
cachelib/__pycache__/serializers.cpython-312.pyc,,
cachelib/__pycache__/simple.cpython-312.pyc,,
cachelib/__pycache__/uwsgi.cpython-312.pyc,,
cachelib/base.py,sha256=3_B-cB1VEh_x-VzH9g3qvzdqCxDX2ywDzQ7a_aYFJlE,6731
cachelib/dynamodb.py,sha256=fSmp8G7V0yBcRC2scdIhz8d0D2-9OMZEwQ9AcBONyC8,8512
cachelib/file.py,sha256=xu6m7nzTMcca_wYS0oM4iOxLarQHZFrhUnQnFYansy4,12270
cachelib/memcached.py,sha256=KyUN4wblVPf2XNLYk15kwN9QTfkFK6jrpVGrj4NAoFA,7160
cachelib/mongodb.py,sha256=b9l8fTKMFm8hAXFn748GKertHUASSVDBgPgrtGaZ6cA,6901
cachelib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cachelib/redis.py,sha256=hSKV9fVD7gzk1X_B3Ac9XJYN3G3F6eYBoreqDo9pces,6295
cachelib/serializers.py,sha256=MXk1moN6ljOPUFQ0E0D129mZlrDDwuQ5DvInNkitvoI,3343
cachelib/simple.py,sha256=8UPp95_oc3bLeW_gzFUzdppIKV8hV_o_yQYoKb8gVMk,3422
cachelib/uwsgi.py,sha256=4DX3C9QGvB6mVcg1d7qpLIEkI6bccuq-8M6I_YbPicY,2563

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: true
Tag: py3-none-any

@ -0,0 +1,22 @@
from cachelib.base import BaseCache
from cachelib.base import NullCache
from cachelib.dynamodb import DynamoDbCache
from cachelib.file import FileSystemCache
from cachelib.memcached import MemcachedCache
from cachelib.mongodb import MongoDbCache
from cachelib.redis import RedisCache
from cachelib.simple import SimpleCache
from cachelib.uwsgi import UWSGICache
__all__ = [
"BaseCache",
"NullCache",
"SimpleCache",
"FileSystemCache",
"MemcachedCache",
"RedisCache",
"UWSGICache",
"DynamoDbCache",
"MongoDbCache",
]
__version__ = "0.13.0"

@ -0,0 +1,185 @@
import typing as _t
class BaseCache:
"""Baseclass for the cache systems. All the cache systems implement this
API or a superset of it.
:param default_timeout: the default timeout (in seconds) that is used if
no timeout is specified on :meth:`set`. A timeout
of 0 indicates that the cache never expires.
"""
def __init__(self, default_timeout: int = 300):
self.default_timeout = default_timeout
def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
if timeout is None:
timeout = self.default_timeout
return timeout
def get(self, key: str) -> _t.Any:
"""Look up key in the cache and return the value for it.
:param key: the key to be looked up.
:returns: The value if it exists and is readable, else ``None``.
"""
return None
def delete(self, key: str) -> bool:
"""Delete `key` from the cache.
:param key: the key to delete.
:returns: Whether the key existed and has been deleted.
:rtype: boolean
"""
return True
def get_many(self, *keys: str) -> _t.List[_t.Any]:
"""Returns a list of values for the given keys.
For each key an item in the list is created::
foo, bar = cache.get_many("foo", "bar")
Has the same error handling as :meth:`get`.
:param keys: The function accepts multiple keys as positional
arguments.
"""
return [self.get(k) for k in keys]
def get_dict(self, *keys: str) -> _t.Dict[str, _t.Any]:
"""Like :meth:`get_many` but return a dict::
d = cache.get_dict("foo", "bar")
foo = d["foo"]
bar = d["bar"]
:param keys: The function accepts multiple keys as positional
arguments.
"""
return dict(zip(keys, self.get_many(*keys))) # noqa: B905
def set(
self, key: str, value: _t.Any, timeout: _t.Optional[int] = None
) -> _t.Optional[bool]:
"""Add a new key/value to the cache (overwrites value, if key already
exists in the cache).
:param key: the key to set
:param value: the value for the key
:param timeout: the cache timeout for the key in seconds (if not
specified, it uses the default timeout). A timeout of
0 indicates that the cache never expires.
:returns: ``True`` if key has been updated, ``False`` for backend
errors. Pickling errors, however, will raise a subclass of
``pickle.PickleError``.
:rtype: boolean
"""
return True
def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
"""Works like :meth:`set` but does not overwrite the values of already
existing keys.
:param key: the key to set
:param value: the value for the key
:param timeout: the cache timeout for the key in seconds (if not
specified, it uses the default timeout). A timeout of
0 indicates that the cache never expires.
:returns: Same as :meth:`set`, but also ``False`` for already
existing keys.
:rtype: boolean
"""
return True
def set_many(
self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None
) -> _t.List[_t.Any]:
"""Sets multiple keys and values from a mapping.
:param mapping: a mapping with the keys/values to set.
:param timeout: the cache timeout for the key in seconds (if not
specified, it uses the default timeout). A timeout of
0 indicates that the cache never expires.
        :returns: A list containing all keys successfully set
        :rtype: list
"""
set_keys = []
for key, value in mapping.items():
if self.set(key, value, timeout):
set_keys.append(key)
return set_keys
def delete_many(self, *keys: str) -> _t.List[_t.Any]:
"""Deletes multiple keys at once.
:param keys: The function accepts multiple keys as positional
arguments.
        :returns: A list containing all successfully deleted keys
        :rtype: list
"""
deleted_keys = []
for key in keys:
if self.delete(key):
deleted_keys.append(key)
return deleted_keys
def has(self, key: str) -> bool:
"""Checks if a key exists in the cache without returning it. This is a
cheap operation that bypasses loading the actual data on the backend.
:param key: the key to check
"""
raise NotImplementedError(
"%s doesn't have an efficient implementation of `has`. That "
"means it is impossible to check whether a key exists without "
"fully loading the key's data. Consider using `self.get` "
"explicitly if you don't care about performance."
)
def clear(self) -> bool:
"""Clears the cache. Keep in mind that not all caches support
completely clearing the cache.
:returns: Whether the cache has been cleared.
:rtype: boolean
"""
return True
def inc(self, key: str, delta: int = 1) -> _t.Optional[int]:
"""Increments the value of a key by `delta`. If the key does
not yet exist it is initialized with `delta`.
For supporting caches this is an atomic operation.
:param key: the key to increment.
:param delta: the delta to add.
:returns: The new value or ``None`` for backend errors.
"""
value = (self.get(key) or 0) + delta
return value if self.set(key, value) else None
def dec(self, key: str, delta: int = 1) -> _t.Optional[int]:
"""Decrements the value of a key by `delta`. If the key does
not yet exist it is initialized with `-delta`.
For supporting caches this is an atomic operation.
        :param key: the key to decrement.
:param delta: the delta to subtract.
:returns: The new value or `None` for backend errors.
"""
value = (self.get(key) or 0) - delta
return value if self.set(key, value) else None
class NullCache(BaseCache):
"""A cache that doesn't cache. This can be useful for unit testing.
:param default_timeout: a dummy parameter that is ignored but exists
for API compatibility with other caches.
"""
def has(self, key: str) -> bool:
return False
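# A minimal usage sketch (illustrative only): a toy dict-backed subclass showing
# that only the storage primitives need overriding; get_many, get_dict, set_many,
# inc and dec above are built on top of them. Timeouts are ignored in this sketch.
class _DictCache(BaseCache):
    def __init__(self, default_timeout: int = 300):
        super().__init__(default_timeout)
        self._store: _t.Dict[str, _t.Any] = {}

    def get(self, key: str) -> _t.Any:
        return self._store.get(key)

    def set(
        self, key: str, value: _t.Any, timeout: _t.Optional[int] = None
    ) -> _t.Optional[bool]:
        self._store[key] = value
        return True

    def delete(self, key: str) -> bool:
        return self._store.pop(key, None) is not None

    def has(self, key: str) -> bool:
        return key in self._store


if __name__ == "__main__":
    cache = _DictCache()
    cache.set_many({"a": 1, "b": 2})
    assert cache.get_dict("a", "b") == {"a": 1, "b": 2}
    assert cache.inc("hits") == 1  # a missing key is initialized with the delta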

@ -0,0 +1,226 @@
import datetime
import typing as _t
from cachelib.base import BaseCache
from cachelib.serializers import DynamoDbSerializer
CREATED_AT_FIELD = "created_at"
RESPONSE_FIELD = "response"
class DynamoDbCache(BaseCache):
"""
Implementation of cachelib.BaseCache that uses an AWS DynamoDb table
as the backend.
Your server process will require dynamodb:GetItem and dynamodb:PutItem
IAM permissions on the cache table.
Limitations: DynamoDB table items are limited to 400 KB in size. Since
this class stores cached items in a table, the max size of a cache entry
will be slightly less than 400 KB, since the cache key and expiration
time fields are also part of the item.
:param table_name: The name of the DynamoDB table to use
:param default_timeout: Set the timeout in seconds after which cache entries
expire
:param key_field: The name of the hash_key attribute in the DynamoDb
table. This must be a string attribute.
:param expiration_time_field: The name of the table attribute to store the
expiration time in. This will be an int
attribute. The timestamp will be stored as
seconds past the epoch. If you configure
this as the TTL field, then DynamoDB will
automatically delete expired entries.
:param key_prefix: A prefix that should be added to all keys.
"""
serializer = DynamoDbSerializer()
def __init__(
self,
table_name: _t.Optional[str] = "python-cache",
default_timeout: int = 300,
key_field: _t.Optional[str] = "cache_key",
expiration_time_field: _t.Optional[str] = "expiration_time",
key_prefix: _t.Optional[str] = None,
**kwargs: _t.Any
):
super().__init__(default_timeout)
try:
import boto3 # type: ignore
except ImportError as err:
raise RuntimeError("no boto3 module found") from err
self._table_name = table_name
self._key_field = key_field
self._expiration_time_field = expiration_time_field
self.key_prefix = key_prefix or ""
self._dynamo = boto3.resource("dynamodb", **kwargs)
self._attr = boto3.dynamodb.conditions.Attr
try:
self._table = self._dynamo.Table(table_name)
self._table.load()
# catch this exception (triggered if the table doesn't exist)
except Exception:
table = self._dynamo.create_table(
AttributeDefinitions=[
{"AttributeName": key_field, "AttributeType": "S"}
],
TableName=table_name,
KeySchema=[
{"AttributeName": key_field, "KeyType": "HASH"},
],
BillingMode="PAY_PER_REQUEST",
)
table.wait_until_exists()
dynamo = boto3.client("dynamodb", **kwargs)
dynamo.update_time_to_live(
TableName=table_name,
TimeToLiveSpecification={
"Enabled": True,
"AttributeName": expiration_time_field,
},
)
self._table = self._dynamo.Table(table_name)
self._table.load()
def _utcnow(self) -> _t.Any:
"""Return a tz-aware UTC datetime representing the current time"""
return datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)
def _get_item(self, key: str, attributes: _t.Optional[list] = None) -> _t.Any:
"""
Get an item from the cache table, optionally limiting the returned
attributes.
:param key: The cache key of the item to fetch
:param attributes: An optional list of attributes to fetch. If not
given, all attributes are fetched. The
expiration_time field will always be added to the
list of fetched attributes.
:return: The table item for key if it exists and is not expired, else
None
"""
kwargs = {}
if attributes:
if self._expiration_time_field not in attributes:
attributes = list(attributes) + [self._expiration_time_field]
kwargs = dict(ProjectionExpression=",".join(attributes))
response = self._table.get_item(Key={self._key_field: key}, **kwargs)
cache_item = response.get("Item")
if cache_item:
now = int(self._utcnow().timestamp())
if cache_item.get(self._expiration_time_field, now + 100) > now:
return cache_item
return None
def get(self, key: str) -> _t.Any:
"""
Get a cache item
:param key: The cache key of the item to fetch
:return: cache value if not expired, else None
"""
cache_item = self._get_item(self.key_prefix + key)
if cache_item:
response = cache_item[RESPONSE_FIELD]
value = self.serializer.loads(response)
return value
return None
def delete(self, key: str) -> bool:
"""
Deletes an item from the cache. This is a no-op if the item doesn't
exist
:param key: Key of the item to delete.
:return: True if the key existed and was deleted
"""
try:
self._table.delete_item(
Key={self._key_field: self.key_prefix + key},
ConditionExpression=self._attr(self._key_field).exists(),
)
return True
except self._dynamo.meta.client.exceptions.ConditionalCheckFailedException:
return False
def _set(
self,
key: str,
value: _t.Any,
timeout: _t.Optional[int] = None,
overwrite: _t.Optional[bool] = True,
) -> _t.Any:
"""
Store a cache item, with the option to not overwrite existing items
:param key: Cache key to use
:param value: a serializable object
:param timeout: The timeout in seconds for the cached item, to override
the default
:param overwrite: If true, overwrite any existing cache item with key.
If false, the new value will only be stored if no
non-expired cache item exists with key.
:return: True if the new item was stored.
"""
timeout = self._normalize_timeout(timeout)
now = self._utcnow()
kwargs = {}
if not overwrite:
# Cause the put to fail if a non-expired item with this key
# already exists
cond = self._attr(self._key_field).not_exists() | self._attr(
self._expiration_time_field
).lte(int(now.timestamp()))
kwargs = dict(ConditionExpression=cond)
try:
dump = self.serializer.dumps(value)
item = {
self._key_field: key,
CREATED_AT_FIELD: now.isoformat(),
RESPONSE_FIELD: dump,
}
if timeout > 0:
expiration_time = now + datetime.timedelta(seconds=timeout)
item[self._expiration_time_field] = int(expiration_time.timestamp())
self._table.put_item(Item=item, **kwargs)
return True
except Exception:
return False
def set(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any:
return self._set(self.key_prefix + key, value, timeout=timeout, overwrite=True)
def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any:
return self._set(self.key_prefix + key, value, timeout=timeout, overwrite=False)
def has(self, key: str) -> bool:
return (
self._get_item(self.key_prefix + key, [self._expiration_time_field])
is not None
)
def clear(self) -> bool:
paginator = self._dynamo.meta.client.get_paginator("scan")
pages = paginator.paginate(
TableName=self._table_name, ProjectionExpression=self._key_field
)
with self._table.batch_writer() as batch:
for page in pages:
for item in page["Items"]:
batch.delete_item(Key=item)
return True
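# A minimal usage sketch (illustrative only; the table name, key prefix and
# region below are made-up values, and boto3 plus valid AWS credentials are
# required). Extra keyword arguments are forwarded to boto3.resource("dynamodb").
if __name__ == "__main__":
    cache = DynamoDbCache(
        table_name="demo-cache",      # created automatically if it does not exist
        default_timeout=600,          # seconds; 0 means the entry never expires
        key_prefix="demo:",
        region_name="eu-central-1",
    )
    cache.set("user:1", {"name": "example"})
    print(cache.get("user:1"))
    print(cache.has("user:1"))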

@ -0,0 +1,348 @@
import errno
import hashlib
import logging
import os
import platform
import stat
import struct
import tempfile
import typing as _t
from contextlib import contextmanager
from pathlib import Path
from time import sleep
from time import time
from cachelib.base import BaseCache
from cachelib.serializers import FileSystemSerializer
def _lazy_md5(string: bytes = b"") -> _t.Any:
"""Don't access ``hashlib.md5`` until runtime. FIPS builds may not include
md5, in which case the import and use as a default would fail before the
developer can configure something else.
"""
return hashlib.md5(string)
class FileSystemCache(BaseCache):
"""A cache that stores the items on the file system. This cache depends
on being the only user of the `cache_dir`. Make absolutely sure that
nobody but this cache stores files there or otherwise the cache will
randomly delete files therein.
:param cache_dir: the directory where cache files are stored.
:param threshold: the maximum number of items the cache stores before
it starts deleting some. A threshold value of 0
indicates no threshold.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`. A timeout of
0 indicates that the cache never expires.
:param mode: the file mode wanted for the cache files, default 0600
:param hash_method: Default hashlib.md5. The hash method used to
generate the filename for cached results.
                        Default is lazily loaded and can be overridden by
                        setting `_default_hash_method`.
"""
#: used for temporary files by the FileSystemCache
_fs_transaction_suffix = ".__wz_cache"
#: keep amount of files in a cache element
_fs_count_file = "__wz_cache_count"
#: default file name hashing method
_default_hash_method = staticmethod(_lazy_md5)
serializer = FileSystemSerializer()
def __init__(
self,
cache_dir: str,
threshold: int = 500,
default_timeout: int = 300,
mode: _t.Optional[int] = None,
hash_method: _t.Any = None,
):
BaseCache.__init__(self, default_timeout)
self._path = cache_dir
self._threshold = threshold
self._hash_method = self._default_hash_method
if hash_method is not None:
self._hash_method = hash_method
# Mode set by user takes precedence. If no mode has
# been given, we need to set the correct default based
# on user platform.
self._mode = mode
if self._mode is None:
self._mode = self._get_compatible_platform_mode()
try:
os.makedirs(self._path)
except OSError as ex:
if ex.errno != errno.EEXIST:
raise
# If there are many files and a zero threshold,
# the list_dir can slow initialisation massively
if self._threshold != 0:
self._update_count(value=len(list(self._list_dir())))
def _get_compatible_platform_mode(self) -> int:
mode = 0o600 # nix systems
if platform.system() == "Windows":
mode = stat.S_IWRITE
return mode
@property
def _file_count(self) -> int:
return self.get(self._fs_count_file) or 0
def _update_count(
self, delta: _t.Optional[int] = None, value: _t.Optional[int] = None
) -> None:
# If we have no threshold, don't count files
if self._threshold == 0:
return
if delta:
new_count = self._file_count + delta
else:
new_count = value or 0
self.set(self._fs_count_file, new_count, mgmt_element=True)
def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
timeout = BaseCache._normalize_timeout(self, timeout)
if timeout != 0:
timeout = int(time()) + timeout
return int(timeout)
def _is_mgmt(self, name: str) -> bool:
fshash = self._get_filename(self._fs_count_file).split(os.sep)[-1]
return name == fshash or name.endswith(self._fs_transaction_suffix)
def _list_dir(self) -> _t.Generator[str, None, None]:
"""return a list of (fully qualified) cache filenames"""
return (
os.path.join(self._path, fn)
for fn in os.listdir(self._path)
if not self._is_mgmt(fn)
)
def _over_threshold(self) -> bool:
return self._threshold != 0 and self._file_count > self._threshold
def _remove_expired(self, now: float) -> None:
for fname in self._list_dir():
try:
with self._safe_stream_open(fname, "rb") as f:
expires = struct.unpack("I", f.read(4))[0]
if expires != 0 and expires < now:
os.remove(fname)
self._update_count(delta=-1)
except FileNotFoundError:
pass
except (OSError, EOFError, struct.error):
logging.warning(
"Exception raised while handling cache file '%s'",
fname,
exc_info=True,
)
def _remove_older(self) -> bool:
exp_fname_tuples = []
for fname in self._list_dir():
try:
with self._safe_stream_open(fname, "rb") as f:
timestamp = struct.unpack("I", f.read(4))[0]
exp_fname_tuples.append((timestamp, fname))
except FileNotFoundError:
pass
except (OSError, EOFError, struct.error):
logging.warning(
"Exception raised while handling cache file '%s'",
fname,
exc_info=True,
)
fname_sorted = (
fname for _, fname in sorted(exp_fname_tuples, key=lambda item: item[0])
)
for fname in fname_sorted:
try:
os.remove(fname)
self._update_count(delta=-1)
except FileNotFoundError:
pass
except OSError:
logging.warning(
"Exception raised while handling cache file '%s'",
fname,
exc_info=True,
)
return False
if not self._over_threshold():
break
return True
def _prune(self) -> None:
if self._over_threshold():
now = time()
self._remove_expired(now)
# if still over threshold
if self._over_threshold():
self._remove_older()
def clear(self) -> bool:
for i, fname in enumerate(self._list_dir()):
try:
os.remove(fname)
except FileNotFoundError:
pass
except OSError:
logging.warning(
"Exception raised while handling cache file '%s'",
fname,
exc_info=True,
)
self._update_count(delta=-i)
return False
self._update_count(value=0)
return True
def _get_filename(self, key: str) -> str:
if isinstance(key, str):
bkey = key.encode("utf-8") # XXX unicode review
bkey_hash = self._hash_method(bkey).hexdigest()
else:
raise TypeError(f"Key must be a string, received type {type(key)}")
return os.path.join(self._path, bkey_hash)
def get(self, key: str) -> _t.Any:
filename = self._get_filename(key)
try:
with self._safe_stream_open(filename, "rb") as f:
pickle_time = struct.unpack("I", f.read(4))[0]
if pickle_time == 0 or pickle_time >= time():
return self.serializer.load(f)
except FileNotFoundError:
pass
except (OSError, EOFError, struct.error):
logging.warning(
"Exception raised while handling cache file '%s'",
filename,
exc_info=True,
)
return None
def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
filename = self._get_filename(key)
if not os.path.exists(filename):
return self.set(key, value, timeout)
return False
def set(
self,
key: str,
value: _t.Any,
timeout: _t.Optional[int] = None,
mgmt_element: bool = False,
) -> bool:
# Management elements have no timeout
if mgmt_element:
timeout = 0
# Don't prune on management element update, to avoid loop
else:
self._prune()
timeout = self._normalize_timeout(timeout)
filename = self._get_filename(key)
overwrite = os.path.isfile(filename)
try:
fd, tmp = tempfile.mkstemp(
suffix=self._fs_transaction_suffix, dir=self._path
)
with os.fdopen(fd, "wb") as f:
f.write(struct.pack("I", timeout))
self.serializer.dump(value, f)
self._run_safely(os.replace, tmp, filename)
self._run_safely(os.chmod, filename, self._mode)
fsize = Path(filename).stat().st_size
except OSError:
logging.warning(
"Exception raised while handling cache file '%s'",
filename,
exc_info=True,
)
return False
else:
# Management elements should not count towards threshold
if not overwrite and not mgmt_element:
self._update_count(delta=1)
return fsize > 0 # function should fail if file is empty
def delete(self, key: str, mgmt_element: bool = False) -> bool:
try:
os.remove(self._get_filename(key))
except FileNotFoundError: # if file doesn't exist we consider it deleted
return True
except OSError:
logging.warning("Exception raised while handling cache file", exc_info=True)
return False
else:
# Management elements should not count towards threshold
if not mgmt_element:
self._update_count(delta=-1)
return True
def has(self, key: str) -> bool:
filename = self._get_filename(key)
try:
with self._safe_stream_open(filename, "rb") as f:
pickle_time = struct.unpack("I", f.read(4))[0]
if pickle_time == 0 or pickle_time >= time():
return True
else:
return False
except FileNotFoundError: # if there is no file there is no key
return False
except (OSError, EOFError, struct.error):
logging.warning(
"Exception raised while handling cache file '%s'",
filename,
exc_info=True,
)
return False
def _run_safely(self, fn: _t.Callable, *args: _t.Any, **kwargs: _t.Any) -> _t.Any:
"""On Windows os.replace, os.chmod and open can yield
permission errors if executed by two different processes."""
if platform.system() == "Windows":
output = None
wait_step = 0.001
max_sleep_time = 10.0
total_sleep_time = 0.0
while total_sleep_time < max_sleep_time:
try:
output = fn(*args, **kwargs)
except PermissionError:
sleep(wait_step)
total_sleep_time += wait_step
wait_step *= 2
else:
break
else:
output = fn(*args, **kwargs)
return output
@contextmanager
def _safe_stream_open(self, path: str, mode: str) -> _t.Generator:
fs = self._run_safely(open, path, mode)
if fs is None:
raise OSError
try:
yield fs
finally:
fs.close()
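# Illustrative usage sketch added for clarity; it is not part of cachelib.
# Assumes a writable directory such as "/tmp/myapp-cache" and the usual
# package-level export of FileSystemCache.
#
#     from cachelib import FileSystemCache
#     cache = FileSystemCache("/tmp/myapp-cache", threshold=500, default_timeout=300)
#     cache.set("greeting", {"lang": "en", "text": "hello"}, timeout=60)
#     print(cache.get("greeting"))   # -> {'lang': 'en', 'text': 'hello'}
#     cache.delete("greeting")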

@ -0,0 +1,196 @@
import re
import typing as _t
from time import time
from cachelib.base import BaseCache
_test_memcached_key = re.compile(r"[^\x00-\x21\xff]{1,250}$").match
class MemcachedCache(BaseCache):
"""A cache that uses memcached as backend.
The first argument can either be an object that resembles the API of a
:class:`memcache.Client` or a tuple/list of server addresses. In the
event that a tuple/list is passed, Werkzeug tries to import the best
available memcache library.
This cache looks into the following packages/modules to find bindings for
memcached:
- ``pylibmc``
- ``google.appengine.api.memcached``
- ``memcached``
- ``libmc``
Implementation notes: This cache backend works around some limitations in
memcached to simplify the interface. For example unicode keys are encoded
to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return
the keys in the same format as passed. Furthermore all get methods
silently ignore key errors to not cause problems when untrusted user data
is passed to the get methods which is often the case in web applications.
:param servers: a list or tuple of server addresses or alternatively
a :class:`memcache.Client` or a compatible client.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`. A timeout of
0 indicates that the cache never expires.
:param key_prefix: a prefix that is added before all keys. This makes it
possible to use the same memcached server for different
applications. Keep in mind that
:meth:`~BaseCache.clear` will also clear keys with a
different prefix.
"""
def __init__(
self,
servers: _t.Any = None,
default_timeout: int = 300,
key_prefix: _t.Optional[str] = None,
):
BaseCache.__init__(self, default_timeout)
if servers is None or isinstance(servers, (list, tuple)):
if servers is None:
servers = ["127.0.0.1:11211"]
self._client = self.import_preferred_memcache_lib(servers)
if self._client is None:
raise RuntimeError("no memcache module found")
else:
# NOTE: servers is actually an already initialized memcache
# client.
self._client = servers
self.key_prefix = key_prefix
def _normalize_key(self, key: str) -> str:
if self.key_prefix:
key = self.key_prefix + key
return key
def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
timeout = BaseCache._normalize_timeout(self, timeout)
if timeout > 0:
timeout = int(time()) + timeout
return timeout
def get(self, key: str) -> _t.Any:
key = self._normalize_key(key)
# memcached doesn't support keys longer than 250 characters. Overly long
# keys often come from untrusted user-submitted data, so we fail silently
# on get instead of raising.
if _test_memcached_key(key):
return self._client.get(key)
def get_dict(self, *keys: str) -> _t.Dict[str, _t.Any]:
key_mapping = {}
for key in keys:
encoded_key = self._normalize_key(key)
if _test_memcached_key(key):
key_mapping[encoded_key] = key
_keys = list(key_mapping)
d = rv = self._client.get_multi(_keys) # type: _t.Dict[str, _t.Any]
if self.key_prefix:
rv = {}
for key, value in d.items():
rv[key_mapping[key]] = value
if len(rv) < len(keys):
for key in keys:
if key not in rv:
rv[key] = None
return rv
def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
key = self._normalize_key(key)
timeout = self._normalize_timeout(timeout)
return bool(self._client.add(key, value, timeout))
def set(
self, key: str, value: _t.Any, timeout: _t.Optional[int] = None
) -> _t.Optional[bool]:
key = self._normalize_key(key)
timeout = self._normalize_timeout(timeout)
return bool(self._client.set(key, value, timeout))
def get_many(self, *keys: str) -> _t.List[_t.Any]:
d = self.get_dict(*keys)
return [d[key] for key in keys]
def set_many(
self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None
) -> _t.List[_t.Any]:
new_mapping = {}
for key, value in mapping.items():
key = self._normalize_key(key)
new_mapping[key] = value
timeout = self._normalize_timeout(timeout)
failed_keys = self._client.set_multi(
new_mapping, timeout
) # type: _t.List[_t.Any]
k_normkey = zip(mapping.keys(), new_mapping.keys()) # noqa: B905
return [k for k, nkey in k_normkey if nkey not in failed_keys]
def delete(self, key: str) -> bool:
key = self._normalize_key(key)
if _test_memcached_key(key):
return bool(self._client.delete(key))
return False
def delete_many(self, *keys: str) -> _t.List[_t.Any]:
new_keys = []
for key in keys:
key = self._normalize_key(key)
if _test_memcached_key(key):
new_keys.append(key)
self._client.delete_multi(new_keys)
return [k for k in new_keys if not self.has(k)]
def has(self, key: str) -> bool:
key = self._normalize_key(key)
if _test_memcached_key(key):
return bool(self._client.append(key, ""))
return False
def clear(self) -> bool:
return bool(self._client.flush_all())
def inc(self, key: str, delta: int = 1) -> _t.Optional[int]:
key = self._normalize_key(key)
value = (self._client.get(key) or 0) + delta
return value if self.set(key, value) else None
def dec(self, key: str, delta: int = 1) -> _t.Optional[int]:
key = self._normalize_key(key)
value = (self._client.get(key) or 0) - delta
return value if self.set(key, value) else None
def import_preferred_memcache_lib(self, servers: _t.Any) -> _t.Any:
"""Returns an initialized memcache client. Used by the constructor."""
try:
import pylibmc # type: ignore
except ImportError:
pass
else:
return pylibmc.Client(servers)
try:
from google.appengine.api import memcache # type: ignore
except ImportError:
pass
else:
return memcache.Client()
try:
import memcache # type: ignore
except ImportError:
pass
else:
return memcache.Client(servers)
try:
import libmc # type: ignore
except ImportError:
pass
else:
return libmc.Client(servers)

@ -0,0 +1,202 @@
import datetime
import logging
import typing as _t
from cachelib.base import BaseCache
from cachelib.serializers import BaseSerializer
class MongoDbCache(BaseCache):
"""
Implementation of cachelib.BaseCache that uses mongodb collection
as the backend.
Limitations: maximum MongoDB document size is 16mb
:param client: mongodb client or connection string
:param db: mongodb database name
:param collection: mongodb collection name
:param default_timeout: Set the timeout in seconds after which cache entries
expire
:param key_prefix: A prefix that should be added to all keys.
"""
serializer = BaseSerializer()
def __init__(
self,
client: _t.Any = None,
db: _t.Optional[str] = "cache-db",
collection: _t.Optional[str] = "cache-collection",
default_timeout: int = 300,
key_prefix: _t.Optional[str] = None,
**kwargs: _t.Any
):
super().__init__(default_timeout)
try:
import pymongo # type: ignore
except ImportError:
logging.warning("no pymongo module found")
if client is None or isinstance(client, str):
client = pymongo.MongoClient(host=client)
self.client = client[db][collection]
index_info = self.client.index_information()
all_keys = {
subkey[0] for value in index_info.values() for subkey in value["key"]
}
if "id" not in all_keys:
self.client.create_index("id", unique=True)
self.key_prefix = key_prefix or ""
self.collection = collection
def _utcnow(self) -> _t.Any:
"""Return a tz-aware UTC datetime representing the current time"""
return datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)
def _expire_records(self) -> _t.Any:
res = self.client.delete_many({"expiration": {"$lte": self._utcnow()}})
return res
def get(self, key: str) -> _t.Any:
"""
Get a cache item
:param key: The cache key of the item to fetch
:return: cache value if not expired, else None
"""
self._expire_records()
record = self.client.find_one({"id": self.key_prefix + key})
value = None
if record:
value = self.serializer.loads(record["val"])
return value
def delete(self, key: str) -> bool:
"""
Deletes an item from the cache. This is a no-op if the item doesn't
exist
:param key: Key of the item to delete.
:return: True if the key existed and was deleted
"""
res = self.client.delete_one({"id": self.key_prefix + key})
deleted = bool(res.deleted_count > 0)
return deleted
def _set(
self,
key: str,
value: _t.Any,
timeout: _t.Optional[int] = None,
overwrite: _t.Optional[bool] = True,
) -> _t.Any:
"""
Store a cache item, with the option to not overwrite existing items
:param key: Cache key to use
:param value: a serializable object
:param timeout: The timeout in seconds for the cached item, to override
the default
:param overwrite: If true, overwrite any existing cache item with key.
If false, the new value will only be stored if no
non-expired cache item exists with key.
:return: True if the new item was stored.
"""
timeout = self._normalize_timeout(timeout)
now = self._utcnow()
if not overwrite:
# fail if a non-expired item with this key
# already exists
if self.has(key):
return False
dump = self.serializer.dumps(value)
record = {"id": self.key_prefix + key, "val": dump}
if timeout > 0:
record["expiration"] = now + datetime.timedelta(seconds=timeout)
self.client.update_one({"id": self.key_prefix + key}, {"$set": record}, True)
return True
def set(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any:
self._expire_records()
return self._set(key, value, timeout=timeout, overwrite=True)
def set_many(
self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None
) -> _t.List[_t.Any]:
self._expire_records()
from pymongo import UpdateOne
operations = []
now = self._utcnow()
timeout = self._normalize_timeout(timeout)
for key, val in mapping.items():
dump = self.serializer.dumps(val)
record = {"id": self.key_prefix + key, "val": dump}
if timeout > 0:
record["expiration"] = now + datetime.timedelta(seconds=timeout)
operations.append(
UpdateOne({"id": self.key_prefix + key}, {"$set": record}, upsert=True),
)
result = self.client.bulk_write(operations)
keys = list(mapping.keys())
if result.bulk_api_result["nUpserted"] != len(keys):
query = self.client.find(
{"id": {"$in": [self.key_prefix + key for key in keys]}}
)
keys = []
for item in query:
keys.append(item["id"])
return keys
def get_many(self, *keys: str) -> _t.List[_t.Any]:
results = self.get_dict(*keys)
values = []
for key in keys:
values.append(results.get(key, None))
return values
def get_dict(self, *keys: str) -> _t.Dict[str, _t.Any]:
self._expire_records()
query = self.client.find(
{"id": {"$in": [self.key_prefix + key for key in keys]}}
)
results = dict.fromkeys(keys, None)
for item in query:
value = self.serializer.loads(item["val"])
results[item["id"][len(self.key_prefix) :]] = value
return results
def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any:
self._expire_records()
return self._set(key, value, timeout=timeout, overwrite=False)
def has(self, key: str) -> bool:
self._expire_records()
record = self.get(key)
return record is not None
def delete_many(self, *keys: str) -> _t.List[_t.Any]:
self._expire_records()
res = list(keys)
filter = {"id": {"$in": [self.key_prefix + key for key in keys]}}
result = self.client.delete_many(filter)
if result.deleted_count != len(keys):
existing_keys = [
item["id"][len(self.key_prefix) :] for item in self.client.find(filter)
]
res = [item for item in keys if item not in existing_keys]
return res
def clear(self) -> bool:
self.client.drop()
return True

@ -0,0 +1,159 @@
import typing as _t
from cachelib.base import BaseCache
from cachelib.serializers import RedisSerializer
class RedisCache(BaseCache):
"""Uses the Redis key-value store as a cache backend.
The first argument can be either a string denoting address of the Redis
server or an object resembling an instance of a redis.Redis class.
Note: Python Redis API already takes care of encoding unicode strings on
the fly.
:param host: address of the Redis server or an object which API is
compatible with the official Python Redis client (redis-py).
:param port: port number on which Redis server listens for connections.
:param password: password authentication for the Redis server.
:param db: db (zero-based numeric index) on Redis Server to connect.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`. A timeout of
0 indicates that the cache never expires.
:param key_prefix: A prefix that should be added to all keys.
Any additional keyword arguments will be passed to ``redis.Redis``.
"""
_read_client: _t.Any = None
_write_client: _t.Any = None
serializer = RedisSerializer()
def __init__(
self,
host: _t.Any = "localhost",
port: int = 6379,
password: _t.Optional[str] = None,
db: int = 0,
default_timeout: int = 300,
key_prefix: _t.Optional[_t.Union[str, _t.Callable[[], str]]] = None,
**kwargs: _t.Any,
):
BaseCache.__init__(self, default_timeout)
if host is None:
raise ValueError("RedisCache host parameter may not be None")
if isinstance(host, str):
try:
import redis
except ImportError as err:
raise RuntimeError("no redis module found") from err
if kwargs.get("decode_responses", None):
raise ValueError("decode_responses is not supported by RedisCache.")
self._write_client = self._read_client = redis.Redis(
host=host, port=port, password=password, db=db, **kwargs
)
else:
self._read_client = self._write_client = host
self.key_prefix = key_prefix or ""
def _get_prefix(self) -> str:
return (
self.key_prefix if isinstance(self.key_prefix, str) else self.key_prefix()
)
def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
"""Normalize timeout by setting it to default of 300 if
not defined (None) or -1 if explicitly set to zero.
:param timeout: timeout to normalize.
"""
timeout = BaseCache._normalize_timeout(self, timeout)
if timeout == 0:
timeout = -1
return timeout
def get(self, key: str) -> _t.Any:
return self.serializer.loads(
self._read_client.get(f"{self._get_prefix()}{key}")
)
def get_many(self, *keys: str) -> _t.List[_t.Any]:
if self.key_prefix:
prefixed_keys = [f"{self._get_prefix()}{key}" for key in keys]
else:
prefixed_keys = list(keys)
return [self.serializer.loads(x) for x in self._read_client.mget(prefixed_keys)]
def set(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any:
timeout = self._normalize_timeout(timeout)
dump = self.serializer.dumps(value)
if timeout == -1:
result = self._write_client.set(
name=f"{self._get_prefix()}{key}", value=dump
)
else:
result = self._write_client.setex(
name=f"{self._get_prefix()}{key}", value=dump, time=timeout
)
return result
def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any:
timeout = self._normalize_timeout(timeout)
dump = self.serializer.dumps(value)
created = self._write_client.setnx(
name=f"{self._get_prefix()}{key}", value=dump
)
# handle case where timeout is explicitly set to zero
if created and timeout != -1:
self._write_client.expire(name=f"{self._get_prefix()}{key}", time=timeout)
return created
def set_many(
self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None
) -> _t.List[_t.Any]:
timeout = self._normalize_timeout(timeout)
# Use transaction=False to batch without calling redis MULTI
# which is not supported by twemproxy
pipe = self._write_client.pipeline(transaction=False)
for key, value in mapping.items():
dump = self.serializer.dumps(value)
if timeout == -1:
pipe.set(name=f"{self._get_prefix()}{key}", value=dump)
else:
pipe.setex(name=f"{self._get_prefix()}{key}", value=dump, time=timeout)
results = pipe.execute()
return [k for k, was_set in zip(mapping.keys(), results) if was_set]
def delete(self, key: str) -> bool:
return bool(self._write_client.delete(f"{self._get_prefix()}{key}"))
def delete_many(self, *keys: str) -> _t.List[_t.Any]:
if not keys:
return []
if self.key_prefix:
prefixed_keys = [f"{self._get_prefix()}{key}" for key in keys]
else:
prefixed_keys = [k for k in keys]
self._write_client.delete(*prefixed_keys)
return [k for k in prefixed_keys if not self.has(k)]
def has(self, key: str) -> bool:
return bool(self._read_client.exists(f"{self._get_prefix()}{key}"))
def clear(self) -> bool:
status = 0
if self.key_prefix:
keys = self._read_client.keys(self._get_prefix() + "*")
if keys:
status = self._write_client.delete(*keys)
else:
status = self._write_client.flushdb()
return bool(status)
def inc(self, key: str, delta: int = 1) -> _t.Any:
return self._write_client.incr(name=f"{self._get_prefix()}{key}", amount=delta)
def dec(self, key: str, delta: int = 1) -> _t.Any:
return self._write_client.incr(name=f"{self._get_prefix()}{key}", amount=-delta)

@ -0,0 +1,112 @@
import logging
import pickle
import typing as _t
class BaseSerializer:
"""This is the base interface for all default serializers.
BaseSerializer.load and BaseSerializer.dump will
default to pickle.load and pickle.dump. This is currently
used only by FileSystemCache which dumps/loads to/from a file stream.
"""
def _warn(self, e: pickle.PickleError) -> None:
logging.warning(
f"An exception has been raised during a pickling operation: {e}"
)
def dump(
self, value: int, f: _t.IO, protocol: int = pickle.HIGHEST_PROTOCOL
) -> None:
try:
pickle.dump(value, f, protocol)
except (pickle.PickleError, pickle.PicklingError) as e:
self._warn(e)
def load(self, f: _t.BinaryIO) -> _t.Any:
try:
data = pickle.load(f)
except pickle.PickleError as e:
self._warn(e)
return None
else:
return data
"""BaseSerializer.loads and BaseSerializer.dumps
work on top of pickle.loads and pickle.dumps. Dumping/loading
strings and byte strings is the default for most cache types.
"""
def dumps(self, value: _t.Any, protocol: int = pickle.HIGHEST_PROTOCOL) -> bytes:
try:
serialized = pickle.dumps(value, protocol)
except (pickle.PickleError, pickle.PicklingError) as e:
self._warn(e)
return serialized
def loads(self, bvalue: bytes) -> _t.Any:
try:
data = pickle.loads(bvalue)
except pickle.PickleError as e:
self._warn(e)
return None
else:
return data
"""Default serializers for each cache type.
The following classes can be used to further customize
serialization behaviour. Alternatively, any serializer can be
overridden in order to use a custom serializer with a different
strategy altogether.
"""
class UWSGISerializer(BaseSerializer):
"""Default serializer for UWSGICache."""
class SimpleSerializer(BaseSerializer):
"""Default serializer for SimpleCache."""
class FileSystemSerializer(BaseSerializer):
"""Default serializer for FileSystemCache."""
class RedisSerializer(BaseSerializer):
"""Default serializer for RedisCache."""
def dumps(self, value: _t.Any, protocol: int = pickle.HIGHEST_PROTOCOL) -> bytes:
"""Dumps an object into a string for redis, using pickle by default."""
return b"!" + pickle.dumps(value, protocol)
def loads(self, value: _t.Optional[bytes]) -> _t.Any:
"""The reversal of :meth:`dump_object`. This might be called with
None.
"""
if value is None:
return None
if value.startswith(b"!"):
try:
return pickle.loads(value[1:])
except pickle.PickleError:
return None
try:
return int(value)
except ValueError:
# before 0.8 we did not have serialization. Still support that.
return value
class DynamoDbSerializer(RedisSerializer):
"""Default serializer for DynamoDbCache."""
def loads(self, value: _t.Any) -> _t.Any:
"""The reversal of :meth:`dump_object`. This might be called with
None.
"""
value = value.value
return super().loads(value)

@ -0,0 +1,100 @@
import typing as _t
from time import time
from cachelib.base import BaseCache
from cachelib.serializers import SimpleSerializer
class SimpleCache(BaseCache):
"""Simple memory cache for single process environments. This class exists
mainly for the development server and is not 100% thread safe. It tries
to use as many atomic operations as possible and no locks for simplicity
but it could happen under heavy load that keys are added multiple times.
:param threshold: the maximum number of items the cache stores before
it starts deleting some.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`. A timeout of
0 indicates that the cache never expires.
"""
serializer = SimpleSerializer()
def __init__(
self,
threshold: int = 500,
default_timeout: int = 300,
):
BaseCache.__init__(self, default_timeout)
self._cache: _t.Dict[str, _t.Any] = {}
self._threshold = threshold or 500  # fall back to 500 when threshold is 0/falsy
def _over_threshold(self) -> bool:
return len(self._cache) > self._threshold
def _remove_expired(self, now: float) -> None:
toremove = [k for k, (expires, _) in self._cache.items() if expires < now]
for k in toremove:
self._cache.pop(k, None)
def _remove_older(self) -> None:
k_ordered = (
k for k, v in sorted(self._cache.items(), key=lambda item: item[1][0])
)
for k in k_ordered:
self._cache.pop(k, None)
if not self._over_threshold():
break
def _prune(self) -> None:
if self._over_threshold():
now = time()
self._remove_expired(now)
# remove older items if still over threshold
if self._over_threshold():
self._remove_older()
def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
timeout = BaseCache._normalize_timeout(self, timeout)
if timeout > 0:
timeout = int(time()) + timeout
return timeout
def get(self, key: str) -> _t.Any:
try:
expires, value = self._cache[key]
if expires == 0 or expires > time():
return self.serializer.loads(value)
except KeyError:
return None
def set(
self, key: str, value: _t.Any, timeout: _t.Optional[int] = None
) -> _t.Optional[bool]:
expires = self._normalize_timeout(timeout)
self._prune()
self._cache[key] = (expires, self.serializer.dumps(value))
return True
def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
expires = self._normalize_timeout(timeout)
self._prune()
item = (expires, self.serializer.dumps(value))
if key in self._cache:
return False
self._cache.setdefault(key, item)
return True
def delete(self, key: str) -> bool:
return self._cache.pop(key, None) is not None
def has(self, key: str) -> bool:
try:
expires, value = self._cache[key]
return bool(expires == 0 or expires > time())
except KeyError:
return False
def clear(self) -> bool:
self._cache.clear()
return not bool(self._cache)

@ -0,0 +1,83 @@
import platform
import typing as _t
from cachelib.base import BaseCache
from cachelib.serializers import UWSGISerializer
class UWSGICache(BaseCache):
"""Implements the cache using uWSGI's caching framework.
.. note::
This class cannot be used when running under PyPy, because the uWSGI
API implementation for PyPy is lacking the needed functionality.
:param default_timeout: The default timeout in seconds.
:param cache: The name of the caching instance to connect to, for
example: mycache@localhost:3031, defaults to an empty string, which
means uWSGI will cache in the local instance. If the cache is in the
same instance as the werkzeug app, you only have to provide the name of
the cache.
"""
serializer = UWSGISerializer()
def __init__(
self,
default_timeout: int = 300,
cache: str = "",
):
BaseCache.__init__(self, default_timeout)
if platform.python_implementation() == "PyPy":
raise RuntimeError(
"uWSGI caching does not work under PyPy, see "
"the docs for more details."
)
try:
import uwsgi # type: ignore
self._uwsgi = uwsgi
except ImportError as err:
raise RuntimeError(
"uWSGI could not be imported, are you running under uWSGI?"
) from err
self.cache = cache
def get(self, key: str) -> _t.Any:
rv = self._uwsgi.cache_get(key, self.cache)
if rv is None:
return
return self.serializer.loads(rv)
def delete(self, key: str) -> bool:
return bool(self._uwsgi.cache_del(key, self.cache))
def set(
self, key: str, value: _t.Any, timeout: _t.Optional[int] = None
) -> _t.Optional[bool]:
result = self._uwsgi.cache_update(
key,
self.serializer.dumps(value),
self._normalize_timeout(timeout),
self.cache,
) # type: bool
return result
def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool:
return bool(
self._uwsgi.cache_set(
key,
self.serializer.dumps(value),
self._normalize_timeout(timeout),
self.cache,
)
)
def clear(self) -> bool:
return bool(self._uwsgi.cache_clear(self.cache))
def has(self, key: str) -> bool:
return self._uwsgi.cache_exists(key, self.cache) is not None

@ -0,0 +1,20 @@
This package contains a modified version of ca-bundle.crt:
ca-bundle.crt -- Bundle of CA Root Certificates
This is a bundle of X.509 certificates of public Certificate Authorities
(CA). These were automatically extracted from Mozilla's root certificates
file (certdata.txt). This file can be found in the mozilla source tree:
https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt
It contains the certificates in PEM format and therefore
can be directly used with curl / libcurl / php_curl, or with
an Apache+mod_ssl webserver for SSL client authentication.
Just configure this file as the SSLCACertificateFile.#
***** BEGIN LICENSE BLOCK *****
This Source Code Form is subject to the terms of the Mozilla Public License,
v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
one at http://mozilla.org/MPL/2.0/.
***** END LICENSE BLOCK *****
@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $

@ -0,0 +1,67 @@
Metadata-Version: 2.1
Name: certifi
Version: 2024.8.30
Summary: Python package for providing Mozilla's CA Bundle.
Home-page: https://github.com/certifi/python-certifi
Author: Kenneth Reitz
Author-email: me@kennethreitz.com
License: MPL-2.0
Project-URL: Source, https://github.com/certifi/python-certifi
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
Classifier: Natural Language :: English
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Requires-Python: >=3.6
License-File: LICENSE
Certifi: Python SSL Certificates
================================
Certifi provides Mozilla's carefully curated collection of Root Certificates for
validating the trustworthiness of SSL certificates while verifying the identity
of TLS hosts. It has been extracted from the `Requests`_ project.
Installation
------------
``certifi`` is available on PyPI. Simply install it with ``pip``::
$ pip install certifi
Usage
-----
To reference the installed certificate authority (CA) bundle, you can use the
built-in function::
>>> import certifi
>>> certifi.where()
'/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
Or from the command line::
$ python -m certifi
/usr/local/lib/python3.7/site-packages/certifi/cacert.pem
Enjoy!
.. _`Requests`: https://requests.readthedocs.io/en/master/
Addition/Removal of Certificates
--------------------------------
Certifi does not support any addition/removal or other modification of the
CA trust store content. This project is intended to provide a reliable and
highly portable root of trust to python deployments. Look to upstream projects
for methods to use alternate trust.

@ -0,0 +1,14 @@
certifi-2024.8.30.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
certifi-2024.8.30.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989
certifi-2024.8.30.dist-info/METADATA,sha256=GhBHRVUN6a4ZdUgE_N5wmukJfyuoE-QyIl8Y3ifNQBM,2222
certifi-2024.8.30.dist-info/RECORD,,
certifi-2024.8.30.dist-info/WHEEL,sha256=UvcQYKBHoFqaQd6LKyqHw9fxEolWLQnlzP0h_LgJAfI,91
certifi-2024.8.30.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
certifi/__init__.py,sha256=p_GYZrjUwPBUhpLlCZoGb0miKBKSqDAyZC5DvIuqbHQ,94
certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
certifi/__pycache__/__init__.cpython-312.pyc,,
certifi/__pycache__/__main__.cpython-312.pyc,,
certifi/__pycache__/core.cpython-312.pyc,,
certifi/cacert.pem,sha256=lO3rZukXdPyuk6BWUJFOKQliWaXH6HGh9l1GGrUgG0c,299427
certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426
certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (74.0.0)
Root-Is-Purelib: true
Tag: py3-none-any

@ -0,0 +1,4 @@
from .core import contents, where
__all__ = ["contents", "where"]
__version__ = "2024.08.30"

@ -0,0 +1,12 @@
import argparse
from certifi import contents, where
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--contents", action="store_true")
args = parser.parse_args()
if args.contents:
print(contents())
else:
print(where())

File diff suppressed because it is too large

@ -0,0 +1,114 @@
"""
certifi.py
~~~~~~~~~~
This module returns the installation location of cacert.pem or its contents.
"""
import sys
import atexit
def exit_cacert_ctx() -> None:
_CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
if sys.version_info >= (3, 11):
from importlib.resources import as_file, files
_CACERT_CTX = None
_CACERT_PATH = None
def where() -> str:
# This is slightly terrible, but we want to delay extracting the file
# in cases where we're inside of a zipimport situation until someone
# actually calls where(), but we don't want to re-extract the file
# on every call of where(), so we'll do it once then store it in a
# global variable.
global _CACERT_CTX
global _CACERT_PATH
if _CACERT_PATH is None:
# This is slightly janky, the importlib.resources API wants you to
# manage the cleanup of this file, so it doesn't actually return a
# path, it returns a context manager that will give you the path
# when you enter it and will do any cleanup when you leave it. In
# the common case of not needing a temporary file, it will just
# return the file system location and the __exit__() is a no-op.
#
# We also have to hold onto the actual context manager, because
# it will do the cleanup whenever it gets garbage collected, so
# we will also store that at the global level as well.
_CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
_CACERT_PATH = str(_CACERT_CTX.__enter__())
atexit.register(exit_cacert_ctx)
return _CACERT_PATH
def contents() -> str:
return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
elif sys.version_info >= (3, 7):
from importlib.resources import path as get_path, read_text
_CACERT_CTX = None
_CACERT_PATH = None
def where() -> str:
# This is slightly terrible, but we want to delay extracting the
# file in cases where we're inside of a zipimport situation until
# someone actually calls where(), but we don't want to re-extract
# the file on every call of where(), so we'll do it once then store
# it in a global variable.
global _CACERT_CTX
global _CACERT_PATH
if _CACERT_PATH is None:
# This is slightly janky, the importlib.resources API wants you
# to manage the cleanup of this file, so it doesn't actually
# return a path, it returns a context manager that will give
# you the path when you enter it and will do any cleanup when
# you leave it. In the common case of not needing a temporary
# file, it will just return the file system location and the
# __exit__() is a no-op.
#
# We also have to hold onto the actual context manager, because
# it will do the cleanup whenever it gets garbage collected, so
# we will also store that at the global level as well.
_CACERT_CTX = get_path("certifi", "cacert.pem")
_CACERT_PATH = str(_CACERT_CTX.__enter__())
atexit.register(exit_cacert_ctx)
return _CACERT_PATH
def contents() -> str:
return read_text("certifi", "cacert.pem", encoding="ascii")
else:
import os
import types
from typing import Union
Package = Union[types.ModuleType, str]
Resource = Union[str, "os.PathLike"]
# This fallback will work for Python versions prior to 3.7 that lack the
# importlib.resources module but relies on the existing `where` function
# so won't address issues with environments like PyOxidizer that don't set
# __file__ on modules.
def read_text(
package: Package,
resource: Resource,
encoding: str = 'utf-8',
errors: str = 'strict'
) -> str:
with open(where(), encoding=encoding) as data:
return data.read()
# If we don't have importlib.resources, then we will just do the old logic
# of assuming we're on the filesystem and munge the path directly.
def where() -> str:
f = os.path.dirname(__file__)
return os.path.join(f, "cacert.pem")
def contents() -> str:
return read_text("certifi", "cacert.pem", encoding="ascii")

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2019 TAHRI Ahmed R.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,695 @@
Metadata-Version: 2.1
Name: charset-normalizer
Version: 3.4.0
Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
Home-page: https://github.com/Ousret/charset_normalizer
Author: Ahmed TAHRI
Author-email: tahri.ahmed@proton.me
License: MIT
Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues
Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest
Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect
Classifier: Development Status :: 5 - Production/Stable
Classifier: License :: OSI Approved :: MIT License
Classifier: Intended Audience :: Developers
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Text Processing :: Linguistic
Classifier: Topic :: Utilities
Classifier: Typing :: Typed
Requires-Python: >=3.7.0
Description-Content-Type: text/markdown
License-File: LICENSE
Provides-Extra: unicode_backport
<h1 align="center">Charset Detection, for Everyone 👋</h1>
<p align="center">
<sup>The Real First Universal Charset Detector</sup><br>
<a href="https://pypi.org/project/charset-normalizer">
<img src="https://img.shields.io/pypi/pyversions/charset_normalizer.svg?orange=blue" />
</a>
<a href="https://pepy.tech/project/charset-normalizer/">
<img alt="Download Count Total" src="https://static.pepy.tech/badge/charset-normalizer/month" />
</a>
<a href="https://bestpractices.coreinfrastructure.org/projects/7297">
<img src="https://bestpractices.coreinfrastructure.org/projects/7297/badge">
</a>
</p>
<p align="center">
<sup><i>Featured Packages</i></sup><br>
<a href="https://github.com/jawah/niquests">
<img alt="Static Badge" src="https://img.shields.io/badge/Niquests-HTTP_1.1%2C%202%2C_and_3_Client-cyan">
</a>
<a href="https://github.com/jawah/wassima">
<img alt="Static Badge" src="https://img.shields.io/badge/Wassima-Certifi_Killer-cyan">
</a>
</p>
<p align="center">
<sup><i>In other language (unofficial port - by the community)</i></sup><br>
<a href="https://github.com/nickspring/charset-normalizer-rs">
<img alt="Static Badge" src="https://img.shields.io/badge/Rust-red">
</a>
</p>
> A library that helps you read text from an unknown charset encoding.<br /> Motivated by `chardet`,
> I'm trying to resolve the issue by taking a new approach.
> All IANA character set names for which the Python core library provides codecs are supported.
<p align="center">
>>>>> <a href="https://charsetnormalizerweb.ousret.now.sh" target="_blank">👉 Try Me Online Now, Then Adopt Me 👈 </a> <<<<<
</p>
This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|
| `Fast` | ❌ | ✅ | ✅ |
| `Universal**` | ❌ | ✅ | ❌ |
| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
| `License` | LGPL-2.1<br>_restrictive_ | MIT | MPL-1.1<br>_restrictive_ |
| `Native Python` | ✅ | ✅ | ❌ |
| `Detect spoken language` | ❌ | ✅ | N/A |
| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
<p align="center">
<img src="https://i.imgflip.com/373iay.gif" alt="Reading Normalized Text" width="226"/><img src="https://media.tenor.com/images/c0180f70732a18b4965448d33adba3d0/tenor.gif" alt="Cat Reading Text" width="200"/>
</p>
*\*\* : They are clearly using specific code for a specific encoding even if it covers most of the commonly used ones.*<br>
Did you get here because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
## ⚡ Performance
This package offers better performance than its counterpart, Chardet. Here are some numbers.
| Package | Accuracy | Mean per file (ms) | File per sec (est) |
|-----------------------------------------------|:--------:|:------------------:|:------------------:|
| [chardet](https://github.com/chardet/chardet) | 86 % | 200 ms | 5 file/sec |
| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
| Package | 99th percentile | 95th percentile | 50th percentile |
|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms |
| charset-normalizer | 100 ms | 50 ms | 5 ms |
Chardet's performance on larger files (1 MB+) is very poor. Expect a huge difference on large payloads.
> Stats are generated using 400+ files using default parameters. For more details on the files used, see the GHA workflows.
> And yes, these results might change at any time. The dataset can be updated to include more files.
> The actual delays depend heavily on your CPU capabilities. The factors should remain the same.
> Keep in mind that the stats are generous and that Chardet's accuracy versus ours is measured using Chardet's initial capability
> (e.g. supported encodings). Challenge them if you want.
## ✨ Installation
Using pip:
```sh
pip install charset-normalizer -U
```
## 🚀 Basic Usage
### CLI
This package comes with a CLI.
```
usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
file [file ...]
The Real First Universal Charset Detector. Discover originating encoding used
on text file. Normalize text to unicode.
positional arguments:
files File(s) to be analysed
optional arguments:
-h, --help show this help message and exit
-v, --verbose Display complementary information about file if any.
Stdout will contain logs about the detection process.
-a, --with-alternative
Output complementary possibilities if any. Top-level
JSON WILL be a list.
-n, --normalize Permit to normalize input file. If not set, program
does not write anything.
-m, --minimal Only output the charset detected to STDOUT. Disabling
JSON output.
-r, --replace Replace file when trying to normalize it instead of
creating a new one.
-f, --force Replace file without asking if you are sure, use this
flag with caution.
-t THRESHOLD, --threshold THRESHOLD
Define a custom maximum amount of chaos allowed in
decoded content. 0. <= chaos <= 1.
--version Show version information and exit.
```
```bash
normalizer ./data/sample.1.fr.srt
```
or
```bash
python -m charset_normalizer ./data/sample.1.fr.srt
```
🎉 Since version 1.4.0 the CLI produces an easily usable stdout result in JSON format.
```json
{
"path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
"encoding": "cp1252",
"encoding_aliases": [
"1252",
"windows_1252"
],
"alternative_encodings": [
"cp1254",
"cp1256",
"cp1258",
"iso8859_14",
"iso8859_15",
"iso8859_16",
"iso8859_3",
"iso8859_9",
"latin_1",
"mbcs"
],
"language": "French",
"alphabets": [
"Basic Latin",
"Latin-1 Supplement"
],
"has_sig_or_bom": false,
"chaos": 0.149,
"coherence": 97.152,
"unicode_path": null,
"is_preferred": true
}
```
### Python
*Just print out normalized text*
```python
from charset_normalizer import from_path
results = from_path('./my_subtitle.srt')
print(str(results.best()))
```
*Upgrade your code without effort*
```python
from charset_normalizer import detect
```
The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.
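For instance, as an illustration (the exact values depend on the input; the result dict mirrors chardet's keys):
```python
from charset_normalizer import detect

raw = "Comment ça va ?".encode("cp1252")
print(detect(raw))
# e.g. {'encoding': 'cp1252', 'language': 'French', 'confidence': 1.0}
```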
See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
## 😇 Why
When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a
reliable alternative using a completely different method. Also! I never back down on a good challenge!
I **don't care** about the **originating charset** encoding, because **two different tables** can
produce **two identical rendered strings.**
What I want is to get readable text, the best I can.
In a way, **I'm brute forcing text decoding.** How cool is that ? 😎
Don't confuse the package **ftfy** with charset-normalizer or chardet. ftfy's goal is to repair unicode strings, whereas charset-normalizer converts a raw file in an unknown encoding to unicode.
## 🍰 How
- Discard all charset encoding tables that could not fit the binary content.
- Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.
- Extract matches with the lowest mess detected.
- Additionally, we measure coherence / probe for a language.
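To make the steps above concrete, here is a deliberately naive sketch of the brute-force idea. It is only an illustration; `naive_best_guess` and the candidate list are invented for this example and are not the library's actual scoring:
```python
# Naive illustration: try candidate codecs and keep the decoding that
# produces the least "mess" (unprintable characters).
CANDIDATES = ["utf_8", "cp1252", "latin_1", "utf_16"]

def naive_best_guess(payload: bytes) -> str:
    scored = []
    for codec in CANDIDATES:
        try:
            text = payload.decode(codec)
        except UnicodeDecodeError:
            continue  # this table cannot fit the binary content
        noise = sum(1 for ch in text if not ch.isprintable() and ch not in "\r\n\t")
        scored.append((noise, codec))
    return min(scored)[1] if scored else "utf_8"

print(naive_best_guess("héllo wörld".encode("cp1252")))  # -> 'cp1252' (or another equally clean table)
```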
**Wait a minute**, what is noise/mess and coherence according to **YOU ?**
*Noise :* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
**I established** some ground rules about **what is obvious** when **it seems like** a mess.
I know that my interpretation of what is noise is probably incomplete; feel free to contribute in order to
improve or rewrite it.
*Coherence :* For each language on earth, we have computed ranked letter-appearance frequencies (the best we can). So I thought
that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.
## ⚡ Known limitations
- Language detection is unreliable when text contains two or more languages sharing identical letters (e.g. HTML (English tags) + Turkish content (sharing Latin characters)).
- Every charset detector heavily depends on sufficient content. In common cases, do not bother running detection on very tiny content.
## ⚠️ About Python EOLs
**If you are running:**
- Python >=2.7,<3.5: Unsupported
- Python 3.5: charset-normalizer < 2.1
- Python 3.6: charset-normalizer < 3.1
- Python 3.7: charset-normalizer < 4.0
Upgrade your Python interpreter as soon as possible.
## 👤 Contributing
Contributions, issues and feature requests are very much welcome.<br />
Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
## 📝 License
Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).<br />
This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.
Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/)
## 💼 For Enterprise
Professional support for charset-normalizer is available as part of the [Tidelift
Subscription][1]. Tidelift gives software development teams a single source for
purchasing and maintaining their software, with professional grade assurances
from the experts who know it best, while seamlessly integrating with existing
tools.
[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme
# Changelog
All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)
### Added
- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.
- Support for Python 3.13 (#512)
### Fixed
- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.
- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)
- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381)
## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)
### Fixed
- Unintentional memory usage regression when using large payload that match several encoding (#376)
- Regression on some detection case showcased in the documentation (#371)
### Added
- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)
## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)
### Changed
- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8
- Improved the general detection reliability based on reports from the community
## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)
### Added
- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`
- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)
### Removed
- (internal) Redundant utils.is_ascii function and unused function is_private_use_only
- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant
### Changed
- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection
- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8
### Fixed
- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350)
## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)
### Changed
- Typehint for function `from_path` no longer enforce `PathLike` as its first argument
- Minor improvement over the global detection reliability
### Added
- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries
- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)
- Explicit support for Python 3.12
### Fixed
- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)
## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)
### Added
- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)
### Removed
- Support for Python 3.6 (PR #260)
### Changed
- Optional speedup provided by mypy/c 1.0.1
## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)
### Fixed
- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233)
### Changed
- Speedup provided by mypy/c 0.990 on Python >= 3.7
## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)
### Added
- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
### Changed
- Build with static metadata using 'build' frontend
- Make the language detection stricter
- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
### Fixed
- CLI with opt --normalize fail when using full path for files
- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
- Sphinx warnings when generating the documentation
### Removed
- Coherence detector no longer return 'Simple English' instead return 'English'
- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
- Breaking: Method `first()` and `best()` from CharsetMatch
- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
- Breaking: Top-level function `normalize`
- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
- Support for the backport `unicodedata2`
## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)
### Added
- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
### Changed
- Build with static metadata using 'build' frontend
- Make the language detection stricter
### Fixed
- CLI with opt --normalize fail when using full path for files
- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
### Removed
- Coherence detector no longer return 'Simple English' instead return 'English'
- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)
### Added
- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
### Removed
- Breaking: Method `first()` and `best()` from CharsetMatch
- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
### Fixed
- Sphinx warnings when generating the documentation
## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)
### Changed
- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
### Removed
- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
- Breaking: Top-level function `normalize`
- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
- Support for the backport `unicodedata2`
## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)
### Deprecated
- Function `normalize` scheduled for removal in 3.0
### Changed
- Removed useless call to decode in fn is_unprintable (#206)
### Fixed
- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)
## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)
### Added
- Output the Unicode table version when running the CLI with `--version` (PR #194)
### Changed
- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)
- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)
### Fixed
- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)
- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)
### Removed
- Support for Python 3.5 (PR #192)
### Deprecated
- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)
## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)
### Fixed
- ASCII miss-detection on rare cases (PR #170)
## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)
### Added
- Explicit support for Python 3.11 (PR #164)
### Changed
- The logging behavior has been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)
## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)
### Fixed
- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)
### Changed
- Skipping the language-detection (CD) on ASCII (PR #155)
## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)
### Changed
- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)
### Fixed
- Wrong logging level applied when setting kwarg `explain` to True (PR #146)
## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)
### Changed
- Improvement over Vietnamese detection (PR #126)
- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)
- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)
- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)
- Code style as refactored by Sourcery-AI (PR #131)
- Minor adjustment on the MD around european words (PR #133)
- Remove and replace SRTs from assets / tests (PR #139)
- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)
- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)
### Fixed
- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)
- Avoid using too insignificant chunk (PR #137)
### Added
- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)
- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)
## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)
### Added
- Add support for Kazakh (Cyrillic) language detection (PR #109)
### Changed
- Further improve inferring the language from a given single-byte code page (PR #112)
- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)
- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)
- Various detection improvements (MD+CD) (PR #117)
### Removed
- Remove redundant logging entry about detected language(s) (PR #115)
### Fixed
- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)
## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)
### Fixed
- Unforeseen regression with the loss of backward compatibility with some older minor releases of Python 3.5.x (PR #100)
- Fix CLI crash when using --minimal output in certain cases (PR #103)
### Changed
- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)
## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)
### Changed
- The project now complies with flake8, mypy, isort, and black to ensure better overall quality (PR #81)
- Backward compatibility with v1.x was improved; the old staticmethods are restored (PR #82)
- The Unicode detection is slightly improved (PR #93)
- Add syntactic sugar `__bool__` for the CharsetMatches results list-container (PR #91)
### Removed
- The project no longer raises a warning on tiny content given for detection; it is simply logged as a warning instead (PR #92)
### Fixed
- In some rare cases, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)
- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)
- The MANIFEST.in was not exhaustive (PR #78)
## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)
### Fixed
- The CLI no longer raises an unexpected exception when no encoding has been found (PR #70)
- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)
- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)
- Submatch factoring could be wrong in rare edge cases (PR #72)
- Multiple files given to the CLI were ignored when publishing results to STDOUT (after the first path) (PR #72)
- Fix line endings from CRLF to LF for certain project files (PR #67)
### Changed
- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)
- Allow fallback on specified encoding if any (PR #71)
## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)
### Changed
- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results, especially for ASCII. (PR #63)
- According to the community's wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)
## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)
### Fixed
- Empty/too-small JSON payload mis-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)
### Changed
- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)
## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)
### Fixed
- Make it work where there isn't a filesystem available, dropping the frequencies.json asset. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)
- Using explain=False permanently disabled the verbose output in the current runtime (PR #47)
- One log entry (language target preemptive) was not shown in logs when using explain=True (PR #47)
- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)
### Changed
- Default argument values of the public function normalize were not aligned with from_bytes (PR #53)
### Added
- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)
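A rough sketch of that alias support; the payload and the alias spellings ("latin-1", "windows-1252") below are illustrative assumptions:

```python
from charset_normalizer import from_bytes

payload = "Comment ça va ?".encode("cp1252")

# cp_isolation restricts probing to the listed code pages; with alias support,
# common spellings such as "latin-1" and "windows-1252" are accepted.
results = from_bytes(payload, cp_isolation=["latin-1", "windows-1252"])
best = results.best()
if best is not None:
    print(best.encoding, str(best))
```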
## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)
### Changed
- 4 to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.
- Emphasis has been placed on UTF-8 detection, which should now be nearly instantaneous.
- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.
- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)
- The program has been rewritten to ease readability and maintainability (now using static typing).
- utf_7 detection has been reinstated.
### Removed
- This package no longer requires anything when used with Python 3.5 (dropped cached_property)
- Removed support for these languages: Catalan, Esperanto, Kazakh, Basque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbo-Croatian.
- The exception hook on UnicodeDecodeError has been removed.
### Deprecated
- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0
### Fixed
- The CLI output used the relative path of the file(s); it should be absolute.
## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)
### Fixed
- Logger configuration/usage no longer conflict with others (PR #44)
## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)
### Removed
- Using standard logging instead of using the package loguru.
- Dropping nose test framework in favor of the maintained pytest.
- Chose not to use the dragonmapper package to help with gibberish Chinese/CJK text.
- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.
- Stop support for UTF-7 that does not contain a SIG.
- Dropping PrettyTable, replaced with pure JSON output in CLI.
### Fixed
- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present, due to the sub-match factoring process.
- Not searching properly for the BOM when trying utf32/16 parent codec.
### Changed
- Improving the package final size by compressing frequencies.json.
- Huge improvement over the largest payloads.
### Added
- CLI now produces JSON consumable output.
- Return ASCII if the given sequences fit, given reasonable confidence.
## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)
### Fixed
- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)
## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)
### Fixed
- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)
## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)
### Fixed
- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)
## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)
### Changed
- Amend the previous release to allow prettytable 2.0 (PR #35)
## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)
### Fixed
- Fix error while using the package with a Python pre-release interpreter (PR #33)
### Changed
- Dependencies refactoring, constraints revised.
### Added
- Add Python 3.9 and 3.10 to the supported interpreters
MIT License
Copyright (c) 2019 TAHRI Ahmed R.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,35 @@
../../../bin/normalizer,sha256=JTs1fcR2AEVygf-kmSsOiW5XOXQbSw4NHWtmwtrYT7E,264
charset_normalizer-3.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
charset_normalizer-3.4.0.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070
charset_normalizer-3.4.0.dist-info/METADATA,sha256=WGbEW9ehh2spNJxo1M6sEGGZWmsQ-oj2DsMjV29zoms,34159
charset_normalizer-3.4.0.dist-info/RECORD,,
charset_normalizer-3.4.0.dist-info/WHEEL,sha256=7B4nnId14TToQHuAKpxbDLCJbNciqBsV-mvXE2hVLJc,151
charset_normalizer-3.4.0.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65
charset_normalizer-3.4.0.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577
charset_normalizer/__main__.py,sha256=JxY8bleaENOFlLRb9HfoeZCzAMnn2A1oGR5Xm2eyqg0,73
charset_normalizer/__pycache__/__init__.cpython-312.pyc,,
charset_normalizer/__pycache__/__main__.cpython-312.pyc,,
charset_normalizer/__pycache__/api.cpython-312.pyc,,
charset_normalizer/__pycache__/cd.cpython-312.pyc,,
charset_normalizer/__pycache__/constant.cpython-312.pyc,,
charset_normalizer/__pycache__/legacy.cpython-312.pyc,,
charset_normalizer/__pycache__/md.cpython-312.pyc,,
charset_normalizer/__pycache__/models.cpython-312.pyc,,
charset_normalizer/__pycache__/utils.cpython-312.pyc,,
charset_normalizer/__pycache__/version.cpython-312.pyc,,
charset_normalizer/api.py,sha256=kMyNUqrfBZU22PP0pYKrSldtYUGA24wsGlXGLAKra7c,22559
charset_normalizer/cd.py,sha256=xwZliZcTQFA3jU0c00PRiu9MNxXTFxQkFLWmMW24ZzI,12560
charset_normalizer/cli/__init__.py,sha256=D5ERp8P62llm2FuoMzydZ7d9rs8cvvLXqE-1_6oViPc,100
charset_normalizer/cli/__main__.py,sha256=zX9sV_ApU1d96Wb0cS04vulstdB4F0Eh7kLn-gevfw4,10411
charset_normalizer/cli/__pycache__/__init__.cpython-312.pyc,,
charset_normalizer/cli/__pycache__/__main__.cpython-312.pyc,,
charset_normalizer/constant.py,sha256=uwoW87NicWZDTLviX7le0wdoYBbhBQDA4n1JtJo77ts,40499
charset_normalizer/legacy.py,sha256=XJjkT0hejMH8qfAKz1ts8OUiBT18t2FJP3tJgLwUWwc,2327
charset_normalizer/md.cpython-312-x86_64-linux-gnu.so,sha256=W654QTU3QZI6eWJ0fanScAr0_O6sL0I61fyRSdC-39Y,16064
charset_normalizer/md.py,sha256=SIIZcENrslI7h3v4GigbFN61fRyE_wiCN1z9Ii3fBRo,20138
charset_normalizer/md__mypyc.cpython-312-x86_64-linux-gnu.so,sha256=aHbqbG6rcHNRf7bwpi57D1X8ZKAmyoVnTldPUDzCKMo,276848
charset_normalizer/models.py,sha256=oAMAcBSEY7CngbUXJp34Wc4Rl9NKJJjGmUwW3EPtk6g,12425
charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
charset_normalizer/utils.py,sha256=teiosMqzKjXyAHXnGdjSBOgnBZwx-SkBbCLrx0UXy8M,11894
charset_normalizer/version.py,sha256=AX66S4ytQFdd6F5jbVU2OPMqYwFS5M3BkMvyX-3BKF8,79

@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: setuptools (75.1.0)
Root-Is-Purelib: false
Tag: cp312-cp312-manylinux_2_17_x86_64
Tag: cp312-cp312-manylinux2014_x86_64

@ -0,0 +1,2 @@
[console_scripts]
normalizer = charset_normalizer.cli:cli_detect
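This entry point is what puts a `normalizer` command on PATH and routes it to `charset_normalizer.cli:cli_detect`. A hedged sketch of calling the same entry point programmatically follows; it assumes `cli_detect` accepts an argparse-style argument list and that `some_file.txt` is a hypothetical file on disk:
from charset_normalizer.cli import cli_detect

# Roughly equivalent to `normalizer --minimal some_file.txt` in a shell;
# the return value is assumed to mirror the console script's exit code.
exit_code = cli_detect(["--minimal", "some_file.txt"])
print("exit code:", exit_code)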

@ -0,0 +1,46 @@
# -*- coding: utf-8 -*-
"""
Charset-Normalizer
~~~~~~~~~~~~~~
The Real First Universal Charset Detector.
A library that helps you read text from an unknown charset encoding.
Motivated by chardet, this package is trying to resolve the issue by taking a new approach.
All IANA character set names for which the Python core library provides codecs are supported.
Basic usage:
>>> from charset_normalizer import from_bytes
>>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
>>> best_guess = results.best()
>>> str(best_guess)
'Bсеки човек има право на образование. Oбразованието!'
Other methods and usages are available - see the full documentation
at <https://github.com/Ousret/charset_normalizer>.
:copyright: (c) 2021 by Ahmed TAHRI
:license: MIT, see LICENSE for more details.
"""
import logging
from .api import from_bytes, from_fp, from_path, is_binary
from .legacy import detect
from .models import CharsetMatch, CharsetMatches
from .utils import set_logging_handler
from .version import VERSION, __version__
__all__ = (
"from_fp",
"from_path",
"from_bytes",
"is_binary",
"detect",
"CharsetMatch",
"CharsetMatches",
"__version__",
"VERSION",
"set_logging_handler",
)
# Attach a NullHandler to the top level logger by default
# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
logging.getLogger("charset_normalizer").addHandler(logging.NullHandler())
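For orientation, a small sketch of the chardet-compatible `detect` helper imported above; the result keys are an assumption based on how the compatibility layer is usually described, not a guarantee:
from charset_normalizer import detect

# Chardet-style compatibility call: assumed to return a dict with
# "encoding", "language" and "confidence" entries.
result = detect("Bсеки човек има право на образование. Oбразованието!".encode("utf_8"))
print(result["encoding"], result["confidence"])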

@ -0,0 +1,4 @@
from .cli import cli_detect
if __name__ == "__main__":
cli_detect()

Some files were not shown because too many files have changed in this diff.