commit 938979ae88f6c40633a4abadbffdb2de3317a5aa
Author: leo <10200039@qq.com>
Date:   Sat Mar 25 10:45:03 2023 +0800

    Initialization

diff --git a/README.md b/README.md
new file mode 100644
index 0000000..383133c
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+## NovaLOG
\ No newline at end of file
diff --git a/app/__init__.py b/app/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/__pycache__/main.cpython-310.pyc b/app/__pycache__/main.cpython-310.pyc
new file mode 100644
index 0000000..24d0472
Binary files /dev/null and b/app/__pycache__/main.cpython-310.pyc differ
diff --git a/app/__pycache__/main.cpython-39.pyc b/app/__pycache__/main.cpython-39.pyc
new file mode 100644
index 0000000..ce233a3
Binary files /dev/null and b/app/__pycache__/main.cpython-39.pyc differ
diff --git a/app/api/__init__.py b/app/api/__init__.py
new file mode 100644
index 0000000..25c83c3
--- /dev/null
+++ b/app/api/__init__.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : __init__.py
+# @Software : VSCode
+# @Datetime : 2021/11/03 16:59:57
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+import traceback
+from fastapi import FastAPI, Request, status, HTTPException
+from fastapi.encoders import jsonable_encoder
+from fastapi.responses import JSONResponse
+from fastapi.exceptions import RequestValidationError
+from fastapi.middleware.cors import CORSMiddleware
+
+# from slowapi import Limiter
+# from slowapi.util import get_remote_address
+# from slowapi.middleware import SlowAPIMiddleware
+
+from api.v1 import api_v1
+from extensions import logger
+from core.settings import config
+from utils.custom_exc import PostParamsError, TokenAuthError  # custom exceptions
+
+# Swagger docs grouping: https://fastapi.tiangolo.com/tutorial/metadata/
+tags_metadata = [
+    {
+        "name": "首页",
+        "description": "数据API",
+    },
+]
+
+def create_app():
+    app = FastAPI(
+        title="Chatmed",
+        description="",
+        version="0.0.1",
+        docs_url=config.DOCS_URL,
+        openapi_url=config.OPENAPI_URL,
+        redoc_url=config.REDOC_URL,
+        openapi_tags=tags_metadata
+    )
+
+    app.include_router(
+        api_v1,
+        prefix="/api/v1",
+    )
+
+    register_exception(app)   # register global exception handlers
+    register_cors(app)        # CORS settings
+    register_middleware(app)
+
+    return app
+
+def register_exception(app: FastAPI):
+    """
+    Global exception handling
+    :param app:
+    :return:
+    """
+
+    # catch custom exceptions
+    @app.exception_handler(PostParamsError)
+    async def query_params_exception_handler(request: Request, exc: PostParamsError):
+        """
+        Catch custom exceptions raised by application code
+        :param request:
+        :param exc:
+        :return:
+        """
+        logger.error(f"参数查询异常\nURL:{request.url}\nHeaders:{request.headers}\n{traceback.format_exc()}")
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content={"code": 400, "data": {"tip": exc.err_desc}, "message": "fail"},
+        )
+
+    @app.exception_handler(TokenAuthError)
+    async def token_exception_handler(request: Request, exc: TokenAuthError):
+        logger.error(f"参数查询异常\nURL:{request.url}\nHeaders:{request.headers}\n{traceback.format_exc()}")
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content={"code": 400, "data": None, "message": exc.err_desc},
+        )
+
+    # catch request parameter validation errors
+    @app.exception_handler(RequestValidationError)
+    async def validation_exception_handler(request: Request, exc: RequestValidationError):
+        """
+        Catch request parameter validation errors
+        :param request:
+        :param exc:
+        :return:
+        """
+        logger.error(f"参数错误\nURL:{request.url}\nHeaders:{request.headers}\n{traceback.format_exc()}")
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=jsonable_encoder({"code": 400, "data": {"tip": exc.errors()}, "body": exc.body, "message": "fail"}),
+        )
"data": {"tip": exc.errors()}, "body": exc.body, "message": "fail"}), + ) + + # 捕获全部异常 + @app.exception_handler(Exception) + async def all_exception_handler(request: Request, exc: Exception): + logger.error(f"全局异常\nURL:{request.url}\nHeaders:{request.headers}\n{traceback.format_exc()}") + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content={"code": 500, "data": {"tip": "服务器错误"}, "message": "fail"}, + ) + + # 捕获调用超限异常 + # @app.exception_handler(HTTPException) + # async def rate_limit_handler(request, exc): + # if exc.status_code == 429: + # return JSONResponse(status_code=status.HTTP_429_TOO_MANY_REQUESTS, + # content={"code": 429, "data": "我还在处理您的问题,请稍后再试!", "message": "to many requests"},) + # return JSONResponse( + # status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + # content={"code": 500, "data": {"tip": "服务器错误"}, "message": "fail"}, + # ) + +def register_cors(app: FastAPI): + """ + 支持跨域 + + 貌似发现了一个bug + https://github.com/tiangolo/fastapi/issues/133 + + :param app: + :return: + """ + + app.add_middleware( + CORSMiddleware, + # allow_origins=['http://localhost:8081'], # 有效, 但是本地vue端口一直在变化, 接口给其他人用也不一定是这个端口 + # allow_origins=['*'], # 无效 bug allow_origins=['http://localhost:8081'] + allow_origin_regex='https?://.*', # 改成用正则就行了 + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + +def register_middleware(app: FastAPI): + """ + 请求响应拦截 hook + + https://fastapi.tiangolo.com/tutorial/middleware/ + :param app: + :return: + """ + + @app.middleware("http") + async def logger_request(request: Request, call_next): + # https://stackoverflow.com/questions/60098005/fastapi-starlette-get-client-real-ip + logger.info(f"访问记录:{request.method} url:{request.url}\nheaders:{request.headers.get('user-agent')}" + f"\nIP:{request.client.host}") + + response = await call_next(request) + + return response \ No newline at end of file diff --git a/app/api/__pycache__/__init__.cpython-310.pyc b/app/api/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..78a3f48 Binary files /dev/null and b/app/api/__pycache__/__init__.cpython-310.pyc differ diff --git a/app/api/__pycache__/__init__.cpython-39.pyc b/app/api/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..6a40c18 Binary files /dev/null and b/app/api/__pycache__/__init__.cpython-39.pyc differ diff --git a/app/api/v1/__init__.py b/app/api/v1/__init__.py new file mode 100644 index 0000000..511bf59 --- /dev/null +++ b/app/api/v1/__init__.py @@ -0,0 +1,23 @@ +# !/usr/bin/env python3 +# -*- encoding : utf-8 -*- +# @Filename : __init__.py +# @Software : VSCode +# @Datetime : 2021/11/03 17:19:33 +# @Author : leo liu +# @Version : 1.0 +# @Description : + +""" + +路由汇总 + +""" + +from fastapi import APIRouter +from api.v1 import auth +from api.v1 import chat + +api_v1 = APIRouter() + +api_v1.include_router(auth.router, tags=["鉴权相关"]) +api_v1.include_router(chat.router, tags=["J-Med聊天"]) \ No newline at end of file diff --git a/app/api/v1/__pycache__/__init__.cpython-310.pyc b/app/api/v1/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..4cc1c45 Binary files /dev/null and b/app/api/v1/__pycache__/__init__.cpython-310.pyc differ diff --git a/app/api/v1/__pycache__/__init__.cpython-39.pyc b/app/api/v1/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..9813407 Binary files /dev/null and b/app/api/v1/__pycache__/__init__.cpython-39.pyc differ diff --git a/app/api/v1/auth/__init__.py b/app/api/v1/auth/__init__.py new file mode 100644 index 
diff --git a/app/api/v1/auth/__init__.py b/app/api/v1/auth/__init__.py
new file mode 100644
index 0000000..196b807
--- /dev/null
+++ b/app/api/v1/auth/__init__.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : __init__.py
+# @Software : VSCode
+# @Datetime : 2021/11/03 17:20:53
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+from .views import router
+from .views import get_current_user
\ No newline at end of file
diff --git a/app/api/v1/auth/__pycache__/__init__.cpython-310.pyc b/app/api/v1/auth/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..725afbf
Binary files /dev/null and b/app/api/v1/auth/__pycache__/__init__.cpython-310.pyc differ
diff --git a/app/api/v1/auth/__pycache__/__init__.cpython-39.pyc b/app/api/v1/auth/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..2c3ca75
Binary files /dev/null and b/app/api/v1/auth/__pycache__/__init__.cpython-39.pyc differ
diff --git a/app/api/v1/auth/__pycache__/views.cpython-310.pyc b/app/api/v1/auth/__pycache__/views.cpython-310.pyc
new file mode 100644
index 0000000..ac1b741
Binary files /dev/null and b/app/api/v1/auth/__pycache__/views.cpython-310.pyc differ
diff --git a/app/api/v1/auth/__pycache__/views.cpython-39.pyc b/app/api/v1/auth/__pycache__/views.cpython-39.pyc
new file mode 100644
index 0000000..2aa02a8
Binary files /dev/null and b/app/api/v1/auth/__pycache__/views.cpython-39.pyc differ
diff --git a/app/api/v1/auth/crud/__init__.py b/app/api/v1/auth/crud/__init__.py
new file mode 100644
index 0000000..cfee95d
--- /dev/null
+++ b/app/api/v1/auth/crud/__init__.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : __init__.py
+# @Software : VSCode
+# @Datetime : 2021/11/04 21:24:52
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
diff --git a/app/api/v1/auth/crud/menu.py b/app/api/v1/auth/crud/menu.py
new file mode 100644
index 0000000..9a110cb
--- /dev/null
+++ b/app/api/v1/auth/crud/menu.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : menu.py
+# @Software : VSCode
+# @Datetime : 2021/11/08 22:13:05
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+from typing import List, Optional
+from loguru import logger
+from sqlalchemy import func, desc
+from sqlalchemy.orm import Session
+from sqlalchemy.sql.expression import and_, or_
+from pydantic.types import conint
+
+from extensions.curd_base import CRUDBase
+from models import Menu, role_menu
+from ..schemas import menu_schema
+
+class CRUDMenu(CRUDBase[Menu, menu_schema.MenuCreate, menu_schema.MenuUpdate]):
+    @staticmethod
+    def query_all(db: Session, *, col_val: str = "", is_delete: int = 0, page: int = 0, page_size: conint(le=50) = 20, order_by: str = "id", is_desc: bool = False) -> dict:
+        temp_page = (page - 1) * page_size
+        search = None
+        order_type = None
+
+        # fuzzy search across several columns: name, path
+        if col_val:
+            search = and_(Menu.is_delete == is_delete, or_(Menu.name.like('%' + col_val + "%"), Menu.path.like('%' + col_val + "%")))
+        else:
+            search = Menu.is_delete == is_delete
+
+        # resolve the ordering
+        # reflect the class attribute from its name
+        if is_desc:
+            order_type = desc(Menu.getAttrFromName(Menu, order_by))
+        else:
+            order_type = Menu.getAttrFromName(Menu, order_by)
+
+        # count
+        total = db.query(func.count(Menu.menu_id)).filter(search).scalar()
+
+        # result set
+        if page > 0:
+            query_obj = db.query(Menu).filter(search).order_by(order_type).offset(temp_page).limit(page_size).all()
+        else:
+            query_obj = db.query(Menu).filter(search).order_by(order_type).all()
+
+        items = [{
+            "menu_id": obj.menu_id,
+            "path": obj.path,
+            "name": obj.name,
"super_menu": obj.super_menu and obj.super_menu or "" + } for obj in query_obj] + + return { + "items": items, + "total": total + } + + def create(self, db: Session, *, obj_in: menu_schema.MenuCreate): + db_obj = Menu( + path = obj_in.path, + name = obj_in.name, + super_menu = obj_in.super_menu + ) + + db.add(db_obj) + db.commit() + # db.refresh(db_obj) + + def sync_menus(self, db: Session, *, obj_arr: List): + for obj_in in obj_arr: + menu = db.query(Menu).filter(Menu.path == obj_in["path"]).first() + + if (menu): + menu.name = obj_in["name"] + menu.path = obj_in["path"] + menu.super_menu = obj_in["super_menu"] + menu.is_delete = obj_in["is_delete"] + + if (menu.is_delete): + db.query(role_menu).filter(role_menu.c.menu_id == menu.menu_id).delete() + else: + db_obj = Menu ( + path = obj_in["path"], + name = obj_in["name"], + super_menu = obj_in["super_menu"] + ) + + db.add(db_obj) + + db.commit() + + def get_by_path(self, db: Session, *, path: str) -> Menu: + return db.query(Menu).filter(Menu.path == path).first() + +crud_menu = CRUDMenu(Menu) \ No newline at end of file diff --git a/app/api/v1/auth/crud/role.py b/app/api/v1/auth/crud/role.py new file mode 100644 index 0000000..73473c0 --- /dev/null +++ b/app/api/v1/auth/crud/role.py @@ -0,0 +1,123 @@ +# !/usr/bin/env python3 +# -*- encoding : utf-8 -*- +# @Filename : role.py +# @Software : VSCode +# @Datetime : 2021/11/15 21:25:46 +# @Author : leo liu +# @Version : 1.0 +# @Description : + +from loguru import logger +from pydantic.types import conint +from sqlalchemy import func, desc +from sqlalchemy.orm import Session +from sqlalchemy.sql.expression import and_, or_ + +from extensions.curd_base import CRUDBase +from models import Role, user_role, role_menu +from .menu import crud_menu +from ..schemas import role_schema + + +class CRUDRole(CRUDBase[Role, role_schema.RoleCreate, role_schema.RoleUpdate]): + @staticmethod + def query_all(db: Session, *, col_val: str = "", is_delete: int = 0, page: int = 0, page_size: conint(le=50) = 20, order_by: str = "id", is_desc: bool = False) -> dict: + """ + 查询角色列表 + :param db: + :param col_val: 输入模糊查询参数值,对应username、nickname和email + :param is_delete: 输入参数值,是否逻辑删除 + :param page: 输入参数值,分页页号,为0时不分页 + :param page_size: 输入参数值,分页页宽,page=0时无效 + :param order_by: 输入参数值,排序字段 + :param is_desc: 排序顺序,true为asc,false为desc + :return: Role字典 + """ + temp_page = (page - 1) * page_size + search = None + order_type = None + + # 处理多个字段的模糊查询:name + if col_val: + search = and_(Role.is_delete == is_delete, or_(Role.name.like('%' + col_val + "%"), Role.title.like('%' + col_val + "%"))) + else: + search = Role.is_delete == is_delete + + # 处理排序方式 + # 利用属性名反射类属性 + if is_desc: + order_type = desc(Role.getAttrFromName(Role, order_by)) + else: + order_type = Role.getAttrFromName(Role, order_by) + + # 查询数量 + total = db.query(func.count(Role.role_id)).filter(search).scalar() + + # 查询结果集 + if page > 0: + query_obj = db.query(Role).filter(search).order_by(order_type).offset(temp_page).limit(page_size).all() + else: + query_obj = db.query(Role).filter(search).order_by(order_type).all() + + items = [{ + "id": obj.id, + "role_id": obj.role_id, + "name": obj.name, + "title": obj.title, + "menus": obj.menus, + "create_time": obj.create_time, + "is_delete": obj.is_delete, + "update_time": obj.update_time + } for obj in query_obj] + + return { + "items": items, + "total": total + } + + @staticmethod + def get_by_id(db: Session, *, role_id: str) -> Role: + return db.query(Role).filter(Role.role_id == role_id).first() + + @staticmethod + def 
diff --git a/app/api/v1/auth/crud/role.py b/app/api/v1/auth/crud/role.py
new file mode 100644
index 0000000..73473c0
--- /dev/null
+++ b/app/api/v1/auth/crud/role.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : role.py
+# @Software : VSCode
+# @Datetime : 2021/11/15 21:25:46
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+from loguru import logger
+from pydantic.types import conint
+from sqlalchemy import func, desc
+from sqlalchemy.orm import Session
+from sqlalchemy.sql.expression import and_, or_
+
+from extensions.curd_base import CRUDBase
+from models import Role, user_role, role_menu
+from .menu import crud_menu
+from ..schemas import role_schema
+
+
+class CRUDRole(CRUDBase[Role, role_schema.RoleCreate, role_schema.RoleUpdate]):
+    @staticmethod
+    def query_all(db: Session, *, col_val: str = "", is_delete: int = 0, page: int = 0, page_size: conint(le=50) = 20, order_by: str = "id", is_desc: bool = False) -> dict:
+        """
+        Query the role list
+        :param db:
+        :param col_val: fuzzy-search value, matched against name and title
+        :param is_delete: whether the row is soft-deleted
+        :param page: page number; 0 disables paging
+        :param page_size: page size; ignored when page=0
+        :param order_by: column to order by
+        :param is_desc: sort order, true for desc, false for asc
+        :return: dict of Role rows
+        """
+        temp_page = (page - 1) * page_size
+        search = None
+        order_type = None
+
+        # fuzzy search across several columns: name, title
+        if col_val:
+            search = and_(Role.is_delete == is_delete, or_(Role.name.like('%' + col_val + "%"), Role.title.like('%' + col_val + "%")))
+        else:
+            search = Role.is_delete == is_delete
+
+        # resolve the ordering
+        # reflect the class attribute from its name
+        if is_desc:
+            order_type = desc(Role.getAttrFromName(Role, order_by))
+        else:
+            order_type = Role.getAttrFromName(Role, order_by)
+
+        # count
+        total = db.query(func.count(Role.role_id)).filter(search).scalar()
+
+        # result set
+        if page > 0:
+            query_obj = db.query(Role).filter(search).order_by(order_type).offset(temp_page).limit(page_size).all()
+        else:
+            query_obj = db.query(Role).filter(search).order_by(order_type).all()
+
+        items = [{
+            "id": obj.id,
+            "role_id": obj.role_id,
+            "name": obj.name,
+            "title": obj.title,
+            "menus": obj.menus,
+            "create_time": obj.create_time,
+            "is_delete": obj.is_delete,
+            "update_time": obj.update_time
+        } for obj in query_obj]
+
+        return {
+            "items": items,
+            "total": total
+        }
+
+    @staticmethod
+    def get_by_id(db: Session, *, role_id: str) -> Role:
+        return db.query(Role).filter(Role.role_id == role_id).first()
+
+    @staticmethod
+    def is_exist(db: Session, role_in: role_schema.BaseRole) -> bool:
+        role = db.query(Role).filter(or_(
+            Role.role_id == role_in.role_id,
+            Role.name == role_in.name,
+            Role.title == role_in.title
+        )).first()
+
+        if role:
+            return True
+        else:
+            return False
+
+    @staticmethod
+    def update(db: Session, *, obj_in: role_schema.RoleUpdate) -> Role:
+        db_obj = db.query(Role).filter(Role.role_id == obj_in.role_id).first()
+
+        if db_obj:
+            if (obj_in.title):
+                db_obj.title = obj_in.title
+
+            if (obj_in.menus):
+                db_obj.menus.clear()
+
+                for m in obj_in.menus:
+                    menu = crud_menu.get_by_path(db, path=m['path'])
+
+                    if (menu):
+                        db_obj.menus.append(menu)
+
+            db_obj.is_delete = obj_in.is_delete
+
+            if (obj_in.is_delete):
+                db.query(user_role).filter(user_role.c.role_id == db_obj.role_id).delete()
+                db.query(role_menu).filter(role_menu.c.role_id == db_obj.role_id).delete()
+
+            db.commit()
+            db.refresh(db_obj)
+
+        return db_obj
+
+crud_role = CRUDRole(Role)
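# Editor's sketch (not part of this commit): RoleUpdate.menus handling above
# relies on SQLAlchemy relationship collection semantics -- clear() then
# append() rewrites the role_menu rows on commit. A self-contained in-memory
# demo of that pattern; the tables and columns here are simplified
# assumptions, not the project's models:
from sqlalchemy import Column, ForeignKey, Integer, String, Table, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()
role_menu = Table(
    "role_menu", Base.metadata,
    Column("role_id", ForeignKey("role.id"), primary_key=True),
    Column("menu_id", ForeignKey("menu.id"), primary_key=True),
)

class Menu(Base):
    __tablename__ = "menu"
    id = Column(Integer, primary_key=True)
    path = Column(String)

class Role(Base):
    __tablename__ = "role"
    id = Column(Integer, primary_key=True)
    menus = relationship("Menu", secondary=role_menu)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as db:
    role = Role(menus=[Menu(path="/old")])
    db.add(role)
    db.commit()
    role.menus.clear()                     # drop the old links
    role.menus.append(Menu(path="/new"))   # relink, as crud_role.update() does
    db.commit()
    assert [m.path for m in role.menus] == ["/new"]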
"city": obj.city, + "roles": obj.roles, + "is_delete": obj.is_delete, + "update_time": obj.update_time + } for obj in query_obj] + + return { + "items": items, + "total": total + } + + + @staticmethod + def get_by_email(db: Session, *, email: str) -> Optional[User]: + """ + 通过email获取用户 + 参数里面的* 表示 后面调用的时候 要用指定参数的方法调用 + 正确调用方式 + curd_user.get_by_email(db, email="xxx") + 错误调用方式 + curd_user.get_by_email(db, "xxx") + :param db: + :param email: + :return: + """ + return db.query(User).filter(User.email == email).first() + + @staticmethod + def get_by_username(db: Session, *, username: str) -> Optional[User]: + """ + 通过用户名获取用户 + """ + return db.query(User).filter(User.username == username).first() + + def authenticate(self, db: Session, *, username: str, password: str, ip: str) -> Optional[User]: + user = self.get_by_username(db, username=username) + if not user: + return None + if not verify_password(password, user.hashed_password): + return None + + user.last_login_time = datetime.now() + user.last_login_ip = ip + + login_history = LoginHistory( + user_id = user.user_id, + username = user.username, + login_time = user.last_login_time, + login_ipv4 = user.last_login_ip + ) + + user.login_histories.append(login_history) + + db.commit() + db.refresh(user) + + return user + + def authenticate_by_email(self, db: Session, *, email: str, password: str) -> Optional[User]: + user = self.get_by_email(db, email=email) + if not user: + return None + if not verify_password(password, user.hashed_password): + return None + return user + + @staticmethod + def is_active(user: User) -> bool: + return user.is_active == 1 + + @staticmethod + def is_delete(user: User) -> bool: + return user.is_delete == 1 + + @staticmethod + def is_exist(db: Session, user_in: user_schema.UserCreate) -> bool: + user = db.query(User).filter(User.username == user_in.username).first() + + if user: + return True + else: + return False + + @staticmethod + def create(db: Session, *, obj_in: user_schema.UserCreate) -> User: + db_user = User( + username=obj_in.username, + nickname=obj_in.nickname, + email=obj_in.email, + hashed_password=get_password_hash(obj_in.password), + q_limit_day=obj_in.q_limit_day, + tokens_limit=obj_in.tokens_limit, + # avatar=obj_in.avatar, + is_active=obj_in.is_active + ) + + for role_id in obj_in.roles: + role = crud_role.get_by_id(db, role_id=role_id) + + if (role): + db_user.roles.append(role) + + db.add(db_user) + db.commit() + db.refresh(db_user) + return db_user + + @staticmethod + def update(db: Session, *, obj_in: user_schema.UserUpdate) -> User: + db_obj = db.query(User).filter(User.username == obj_in.username).first() + + if db_obj: + db_obj.nickname = obj_in.nickname + db_obj.email = obj_in.email + db_obj.hashed_password = get_password_hash(obj_in.password) if obj_in.password else db_obj.hashed_password + + if obj_in.q_day_limit >= 0: + db_obj.q_day_limit = obj_in.q_day_limit + + if obj_in.tokens_limit >= 0: + db_obj.tokens_limit = obj_in.tokens_limit + + db_obj.roles.clear() + + for role_id in obj_in.roles: + role = crud_role.get_by_id(db, role_id=role_id) + + if (role): + db_obj.roles.append(role) + + db_obj.is_active = obj_in.is_active + db_obj.is_delete = obj_in.is_delete + + db.commit() + db.refresh(db_obj) + return db_obj \ No newline at end of file diff --git a/app/api/v1/auth/schemas/__init__.py b/app/api/v1/auth/schemas/__init__.py new file mode 100644 index 0000000..39c5fa2 --- /dev/null +++ b/app/api/v1/auth/schemas/__init__.py @@ -0,0 +1,8 @@ +# !/usr/bin/env python3 +# -*- encoding : utf-8 
diff --git a/app/api/v1/auth/schemas/__init__.py b/app/api/v1/auth/schemas/__init__.py
new file mode 100644
index 0000000..39c5fa2
--- /dev/null
+++ b/app/api/v1/auth/schemas/__init__.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : __init__.py
+# @Software : VSCode
+# @Datetime : 2021/11/03 17:27:57
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
diff --git a/app/api/v1/auth/schemas/menu_schema.py b/app/api/v1/auth/schemas/menu_schema.py
new file mode 100644
index 0000000..98e2c8a
--- /dev/null
+++ b/app/api/v1/auth/schemas/menu_schema.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : menu_schema.py
+# @Software : VSCode
+# @Datetime : 2021/11/08 22:15:39
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+"""
+
+"""
+
+from typing import Optional
+from pydantic import BaseModel
+
+class MenuCreate(BaseModel):
+    path: str
+    name: str
+    super_menu: Optional[str] = None
+
+class MenuUpdate(BaseModel):
+    menu_id: str
+    name: Optional[str] = None
+    path: Optional[str] = None
+    super_menu: Optional[str] = None
+    is_delete: Optional[int] = None
+
+class MenuQuery(BaseModel):
+    col_val: Optional[str] = ''
+    is_delete: Optional[int] = 0
+    page: Optional[int] = 0
+    page_size: Optional[int] = 20
+    order_by: Optional[str] = 'id'
+    is_desc: Optional[bool] = False
diff --git a/app/api/v1/auth/schemas/role_schema.py b/app/api/v1/auth/schemas/role_schema.py
new file mode 100644
index 0000000..a370648
--- /dev/null
+++ b/app/api/v1/auth/schemas/role_schema.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : role_schema.py
+# @Software : VSCode
+# @Datetime : 2021/11/13 21:51:02
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+"""
+
+"""
+from typing import List, Optional
+from pydantic import BaseModel
+
+class BaseRole(BaseModel):
+    role_id: Optional[str] = None
+    name: Optional[str] = None
+    title: Optional[str] = ''
+    is_delete: Optional[int] = 0
+
+class RoleCreate(BaseRole):
+    name: str
+    title: str
+
+class RoleUpdate(BaseRole):
+    title: Optional[str] = None
+    menus: Optional[List] = None
+
+class RoleQuery(BaseRole):
+    col_val: Optional[str] = ''
+    page: Optional[int] = 0
+    page_size: Optional[int] = 20
+    order_by: Optional[str] = 'id'
+    is_desc: Optional[bool] = False
\ No newline at end of file
diff --git a/app/api/v1/auth/schemas/token_schema.py b/app/api/v1/auth/schemas/token_schema.py
new file mode 100644
index 0000000..1c4450a
--- /dev/null
+++ b/app/api/v1/auth/schemas/token_schema.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2020/7/10 17:01
+# @Author : CoderCharm
+# @File : token_schema.py
+# @Software: PyCharm
+# @Desc :
+"""
+
+"""
+
+from typing import Optional
+from pydantic import BaseModel
+
+class Token(BaseModel):
+    token: str
+
+class TokenPayload(BaseModel):
+    sub: Optional[int] = None
+
diff --git a/app/api/v1/auth/schemas/user_schema.py b/app/api/v1/auth/schemas/user_schema.py
new file mode 100644
index 0000000..00fd5fb
--- /dev/null
+++ b/app/api/v1/auth/schemas/user_schema.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2020/7/7 16:23
+# @Author : CoderCharm
+# @File : user_schema.py
+# @Software: PyCharm
+# @Desc :
+"""
+
+"""
+
+from typing import Optional
+from pydantic import BaseModel, EmailStr, AnyHttpUrl
+
+class UserBase(BaseModel):
+    username: str
+    is_active: Optional[bool] = True
+    is_delete: Optional[int] = 0
+
+class UserAuth(UserBase):
+    username: str
+    password: str
+
+class UserCreate(UserBase):
+    password: str
+    nickname: Optional[str] = None
+    email: Optional[EmailStr] = None
+    phone: Optional[str] = None
+    # avatar: Optional[AnyHttpUrl] = None
+    gender: Optional[int] = 0
+    wechat_openid: Optional[str] = None
+    country: Optional[str] = None
+    province: Optional[str] = None
+    city: Optional[str] = None
+    q_limit_day: Optional[int] = 0
+    tokens_limit: Optional[int] = 0
+    roles: Optional[list] = []
+
+class UserUpdate(UserBase):
+    password: Optional[str] = None
+    nickname: Optional[str] = None
+    email: Optional[EmailStr] = None
+    phone: Optional[str] = None
+    avatar: Optional[AnyHttpUrl] = None
+    gender: Optional[int] = 0
+    wechat_openid: Optional[str] = None
+    country: Optional[str] = None
+    province: Optional[str] = None
+    city: Optional[str] = None
+    q_limit_day: Optional[int] = -1
+    tokens_limit: Optional[int] = -1
+    roles: Optional[list] = []
+
+class UserQuery(BaseModel):
+    col_val: Optional[str] = ''
+    is_delete: Optional[int] = 0
+    role: Optional[str] = ''
+    page: Optional[int] = 0
+    page_size: Optional[int] = 20
+    order_by: Optional[str] = 'id'
+    is_desc: Optional[bool] = False
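# Editor's sketch (not part of this commit): the schemas above give free
# request validation -- invalid payloads never reach the view. A quick demo
# with a trimmed-down UserCreate (pydantic v1 style, matching the BaseSettings
# usage elsewhere in this commit; EmailStr needs `pip install email-validator`):
from typing import Optional
from pydantic import BaseModel, EmailStr, ValidationError

class UserCreateDemo(BaseModel):
    username: str
    password: str
    email: Optional[EmailStr] = None

UserCreateDemo(username="leo", password="x")  # email is optional
try:
    UserCreateDemo(username="leo", password="x", email="not-an-email")
except ValidationError as e:
    print(e.errors()[0]["loc"])  # ('email',)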
diff --git a/app/api/v1/auth/views.py b/app/api/v1/auth/views.py
new file mode 100644
index 0000000..56ea7cb
--- /dev/null
+++ b/app/api/v1/auth/views.py
@@ -0,0 +1,322 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : views.py
+# @Software : VSCode
+# @Datetime : 2021/11/03 17:24:24
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+from typing import Any, List, Optional, Union
+from datetime import datetime, timedelta
+from fastapi import APIRouter, Depends, Header, Request
+from sqlalchemy.orm.session import Session
+from sqlalchemy.sql.functions import count
+
+from core.settings import config
+from core import security
+from utils import response_code
+from utils.messages import msg
+from utils import custom_exc
+from db.session import get_db
+from models import User, Menu
+from extensions import logger
+
+from .schemas import user_schema, role_schema, menu_schema
+from .crud.user import CRUDUser
+from .crud.role import crud_role
+from .crud.menu import crud_menu
+
+router = APIRouter()
+
+def get_current_user(
+    db: Session = Depends(get_db), token: Optional[str] = Header(None)
+) -> User:
+    """
+    Resolve the current user from the token header
+    :param db:
+    :param token:
+    :return:
+    """
+    if not token:
+        raise custom_exc.TokenAuthError(err_desc='headers not found token')
+
+    crud_user = CRUDUser(User)
+
+    token_data = security.check_jwt_token(token)
+    user = crud_user.get(db, id=token_data.sub)
+    if not user:
+        raise custom_exc.UserNotFound(err_desc="user not found")
+    return user
+
+@router.post("/auth/login", summary="登录")
+async def auth_login(
+    *,
+    db: Session = Depends(get_db),
+    user_info: user_schema.UserAuth,
+    request: Request
+) -> Any:
+
+    crud_user = CRUDUser(User)
+    # authenticate the user
+    user = crud_user.authenticate(db, username=user_info.username, password=user_info.password, ip=request.client.host)
+
+    if not user:
+        return response_code.resp_200(message=msg.MSG_WRONG_USERINFO)
+    elif not crud_user.is_active(user):
+        return response_code.resp_200(message=msg.MSG_USER_INACTIVE)
+    elif crud_user.is_delete(user):
+        return response_code.resp_200(message=msg.MSG_USER_UNSIGNUP)
+
+    access_token_expires = timedelta(minutes=config.ACCESS_TOKEN_EXPIRE_MINUTES)
+
+    logger.info(f"用户[{user.username}]登录成功")
+    return response_code.resp_200(data={
+        "message": msg.MSG_USER_LOGIN_SUCCESS,
+        "token": security.create_access_token(user.id, expires_delta=access_token_expires),
+    })
+
+@router.post("/auth/userinfo", summary="个人信息")
+async def auth_info(
+    *,
+    current_user: User = Depends(get_current_user)
+) -> Any:
+    """
+    Get the current user's info
+    :param db:
+    :param current_user:
+    :return:
+    """
+    menus = []
+    roles = []
+
+    for role in current_user.roles:
+        for menu in role.menus:
+            menus.append(menu.path)
+
+        roles.append(role.name)
+
+    return response_code.resp_200(data={
+        "username": current_user.username,
+        "nickname": current_user.nickname,
+        "email": current_user.email,
+        "avatar": current_user.avatar,
+        "roles": roles,
+        "menus": menus
+    })
+
+@router.post("/auth/logout", summary="登出")
+async def auth_logout(
+    *,
+    current_user: User = Depends(get_current_user)
+):
+    """
+    User logout
+    :param current_user:
+    """
+    logger.info(f"用户[{current_user.username}]退出登录")
+    return response_code.resp_200(message="logout success")
+
+@router.post("/auth/users", summary="用户列表")
+async def user_list(
+    *,
+    db: Session = Depends(get_db),
+    current_user: User = Depends(get_current_user),
+    query: user_schema.UserQuery = None
+):
+    """
+    User list
+    """
+    crud_user = CRUDUser(User)
+
+    is_accessable = False
+    for role in current_user.roles:
+        for menu in role.menus:
+            if menu.path == "/system/auth/user":
+                is_accessable = True
+                break
+
+    if not is_accessable:
+        logger.info(f"无权查询用户列表->用户id:{current_user.username}")
+        return response_code.resp_200(data=None)
+    else:
+        logger.info(f"查询用户列表->用户id:{current_user.username}当前页{query.page}长度{query.page_size}")
+        response_result = crud_user.query_all(db, col_val=query.col_val, is_delete=query.is_delete, role=query.role, page=query.page, page_size=query.page_size, order_by=query.order_by, is_desc=query.is_desc)
+        return response_code.resp_200(data=response_result)
+
+@router.post("/auth/roles", summary="角色列表")
+async def role_list(
+    *,
+    db: Session = Depends(get_db),
+    current_user: User = Depends(get_current_user),
+    query: role_schema.RoleQuery = None
+):
+    """
+    Role list
+    """
+    is_accessable = False
+
+    for role in current_user.roles:
+        for menu in role.menus:
+            if menu.path == "/system/auth/role":
+                is_accessable = True
+                break
+
+    if not is_accessable:
+        logger.info(f"无权查询角色列表->用户id:{current_user.username}")
+        return response_code.resp_200(data=None)
+    else:
+        logger.info(f"查询角色列表->用户id:{current_user.username}当前页{query.page}长度{query.page_size}")
+        response_result = crud_role.query_all(db, col_val=query.col_val, is_delete=query.is_delete, page=query.page, page_size=query.page_size, order_by=query.order_by, is_desc=query.is_desc)
+        return response_code.resp_200(data=response_result)
+
+@router.post("/auth/newuser", summary="创建用户")
+async def new_user(
+    *,
+    db: Session = Depends(get_db),
+    # token: Optional[str] = Header(None),
+    user: user_schema.UserCreate
+) -> Any:
+    """
+    Create a user
+    """
+    # logger.info(f"创建新用户->用户id:{user.username}")
+    crud_user = CRUDUser(User)
+
+    if (crud_user.is_exist(db, user_in=user)):
+        logger.info(f"创建新用户失败,用户已存在->帐号:{user.username}")
+        return response_code.resp_5010(message="user already exist")
+
+    user = crud_user.create(db, obj_in=user)
+
+    if (user):
+        logger.info(f"创建新用户成功->帐号:{user.username}")
+        return response_code.resp_200(data=user, message="success created user")
+    else:
+        logger.info(f"创建新用户失败->帐号:{user.username}")
+        return response_code.resp_5010(data=None, message="failed created user")
+
+@router.post("/auth/updateuser", summary="修改用户/逻辑删除用户")
+async def update_user(
+    *,
+    db: Session = Depends(get_db),
+    token: Optional[str] = Header(None),
+    user: user_schema.UserUpdate
+) -> Any:
+    """
+    Update a user (docstring previously said "create" by copy-paste)
+    """
+    crud_user = CRUDUser(User)
+
+    if (not crud_user.is_exist(db, user)):
+        logger.info(f"修改用户失败,用户不存在->用户id:{user.username}")
+        return response_code.resp_5010(message="user not exist")
+
+    user = crud_user.update(db, obj_in=user)
+
+    if (user):
+        logger.info(f"修改用户成功->用户id:{user.username}")
+        return response_code.resp_200(data=user, message="success updated user")
+    else:
+        logger.info(f"修改用户失败->用户id:{user.username}")
+        return response_code.resp_5010(data=None, message="failed updated user")
+
+@router.post("/auth/newrole", summary="创建角色")
+async def new_role(
+    *,
+    db: Session = Depends(get_db),
+    token: Optional[str] = Header(None),
+    role: role_schema.RoleCreate
+) -> Any:
+    """
+    Create a role
+    """
+
+    if (crud_role.is_exist(db, role_in=role)):
+        logger.info(f"创建新角色失败,角色已存在->角色名称:{role.name}")
+        return response_code.resp_5010(message="role already exist")
+
+    role = crud_role.create(db, obj_in=role)
+
+    if (role):
+        logger.info(f"创建新角色成功->角色标题:{role.title}")
+        return response_code.resp_200(data=role, message="success created role")
+    else:
+        logger.info(f"创建新角色失败->角色标题:{role.title}")
+        return response_code.resp_5010(data=None, message="failed created role")
+
+@router.post("/auth/roleinfo", summary="角色信息")
+async def role_info(
+    *,
+    db: Session = Depends(get_db),
+    token: Optional[str] = Header(None),
+    query: role_schema.BaseRole
+) -> Any:
+    """
+    Get role info
+    :param db:
+    :param token:
+    :param role_id:
+    :return:
+    """
+    role = crud_role.get_by_id(db=db, role_id=query.role_id)
+
+    if (role):
+        return response_code.resp_200(data={
+            "role_id": query.role_id,
+            "name": role.name,
+            "title": role.title,
+            "menus": role.menus,
+            "is_delete": role.is_delete
+        })
+
+    return response_code.resp_200(data=None)
+
+@router.post("/auth/updaterole", summary="修改角色/逻辑删除角色")
+async def update_role(
+    *,
+    db: Session = Depends(get_db),
+    token: Optional[str] = Header(None),
+    role: role_schema.RoleUpdate
+) -> Any:
+    """
+    Update a role
+    """
+    role = crud_role.update(db, obj_in=role)
+
+    if (role):
+        logger.info(f"修改角色成功->角色名:{role.name}")
+        return response_code.resp_200(data=role, message="success updated role")
+    else:
+        logger.info(f"修改角色失败->角色名:{role.name}")
+        return response_code.resp_5010(data=None, message="failed updated role")
+
+@router.post("/auth/menus", summary="菜单列表")
+async def menu_list(
+    *,
+    db: Session = Depends(get_db),
+    current_user: User = Depends(get_current_user),
+    query: menu_schema.MenuQuery = None
+):
+    """
+    Menu list
+    """
+
+    logger.info(f"查询菜单列表->用户id:{current_user.username}当前页{query.page}长度{query.page_size}")
+    response_result = crud_menu.query_all(db, col_val=query.col_val, is_delete=query.is_delete, page=query.page, page_size=query.page_size, order_by=query.order_by, is_desc=query.is_desc)
+    return response_code.resp_200(data=response_result)
+
+@router.post("/auth/syncmenus", summary="同步菜单")
+async def sync_menus(
+    *,
+    db: Session = Depends(get_db),
+    token: Optional[str] = Header(None),
+    data: List = None
+):
+    if data:  # was `if (data and data.count):` -- list.count is a bound method and always truthy
+        crud_menu.sync_menus(db, obj_arr = data)
+
+    response_result = crud_menu.query_all(db, is_delete=0)
+
+    return response_code.resp_200(data=response_result)
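# Editor's sketch (not part of this commit): user_list()/role_list() above
# repeat the same nested loop to check whether any of the current user's roles
# grants a menu path. One possible refactor hoists it into a helper; the demo
# objects are duck-typed stand-ins for the ORM models:
from types import SimpleNamespace

def has_menu_access(user, path: str) -> bool:
    return any(menu.path == path
               for role in user.roles
               for menu in role.menus)

u = SimpleNamespace(roles=[SimpleNamespace(menus=[SimpleNamespace(path="/system/auth/user")])])
assert has_menu_access(u, "/system/auth/user")
assert not has_menu_access(u, "/system/auth/role")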
diff --git a/app/api/v1/chat/__init__.py b/app/api/v1/chat/__init__.py
new file mode 100644
index 0000000..f8b2a59
--- /dev/null
+++ b/app/api/v1/chat/__init__.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : __init__.py
+# @Software : VSCode
+# @Datetime : 2023/03/20 15:54:34
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+from .views import router
\ No newline at end of file
diff --git a/app/api/v1/chat/__pycache__/__init__.cpython-310.pyc b/app/api/v1/chat/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..829311b
Binary files /dev/null and b/app/api/v1/chat/__pycache__/__init__.cpython-310.pyc differ
diff --git a/app/api/v1/chat/__pycache__/views.cpython-310.pyc b/app/api/v1/chat/__pycache__/views.cpython-310.pyc
new file mode 100644
index 0000000..698d89d
Binary files /dev/null and b/app/api/v1/chat/__pycache__/views.cpython-310.pyc differ
diff --git a/app/api/v1/chat/crud/__pycache__/chat.cpython-310.pyc b/app/api/v1/chat/crud/__pycache__/chat.cpython-310.pyc
new file mode 100644
index 0000000..cefe3c0
Binary files /dev/null and b/app/api/v1/chat/crud/__pycache__/chat.cpython-310.pyc differ
diff --git a/app/api/v1/chat/crud/chat.py b/app/api/v1/chat/crud/chat.py
new file mode 100644
index 0000000..30e1688
--- /dev/null
+++ b/app/api/v1/chat/crud/chat.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : chat.py
+# @Software : VSCode
+# @Datetime : 2023/03/23 11:25:10
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+from datetime import date
+from sqlalchemy.orm import Session
+from sqlalchemy.sql.expression import and_
+from typing import Any
+
+from extensions.curd_base import CRUDBase
+from models import ChatHistory, ChatCountDay
+from ..schemas import chat_schema
+from models import User, Settings  # ChatCountDay was imported above; duplicate removed
+
+class CRUDChat(CRUDBase[ChatHistory, ChatCountDay, chat_schema.Chat]):
+    '''
+    Fetch the configured openai_key values
+    '''
+    def get_openai_keys(self, db: Session) -> Any:
+        openai_keys = []
+        settings = db.query(Settings).filter(Settings.key == 'openai_key')
+
+        if settings:
+            for setting in settings:
+                openai_keys.append(setting.value)
+
+        return openai_keys
+
+    '''
+    Fetch the default max_tokens
+    '''
+    def get_default_max_tokens(self, db: Session) -> int:
+        default_max_tokens = 2000
+        setting = db.query(Settings).filter(Settings.key == 'default_max_tokens').first()
+
+        if setting:
+            if (str.isdigit(setting.value)):
+                limit = int(setting.value)
+                if limit > 0 and limit <= 2048:
+                    default_max_tokens = limit
+
+        return default_max_tokens
+
+    '''
+    Get the current user's question count for today
+    '''
+    def get_chat_count(self, db: Session, user: User) -> int:
+        q_date = date.today()
+        chat_count_day = db.query(ChatCountDay).filter(and_(ChatCountDay.user_id == user.user_id, ChatCountDay.q_time == q_date)).first()
+
+        if not chat_count_day:
+            return -1
+
+        return chat_count_day.q_times
+
+    '''
+    Update the current user's question count for today
+    '''
+    def update_chat_count(self, db: Session, user: User):
+        q_date = date.today()
+        chat_count_day = db.query(ChatCountDay).filter(and_(ChatCountDay.user_id == user.user_id, ChatCountDay.q_time == q_date)).first()
+
+        if not chat_count_day:
+            chat_count_day = ChatCountDay(
+                user_id = user.user_id,
+                q_time = q_date,
+                q_times = 1
+            )
+
+            user.chat_count_day.append(chat_count_day)
+        else:
+            chat_count_day.q_times = chat_count_day.q_times + 1
+
+        db.commit()
+        db.refresh(user)
+
+    '''
+    Record a user conversation
+    '''
+    def new_chat(self, db: Session, user: User, q: str, a: str):
+        chat_history = ChatHistory(
+            user_id = user.user_id,
+            username = user.username,
+            q_content = q,
+            a_content = a
+        )
+
+        user.chat_history.append(chat_history)
+
+        db.commit()
+        db.refresh(user)
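# Editor's sketch (not part of this commit): get_chat_count()/update_chat_count()
# above implement a per-user daily counter. The decision logic that consumes
# them (see chat/views.py below) reduces to this pure function -- note that
# get_chat_count() returns -1 when the user has no row for today, which still
# passes a positive limit:
def quota_exceeded(q_times: int, q_limit_day: int) -> bool:
    # q_limit_day == 0 means unlimited; < 0 blocks outright
    return (q_limit_day > 0 and q_times >= q_limit_day) or q_limit_day < 0

assert not quota_exceeded(-1, 10)   # no questions yet today
assert quota_exceeded(10, 10)       # limit reached
assert quota_exceeded(0, -1)        # negative limit blocks
assert not quota_exceeded(99, 0)    # 0 = unlimited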
diff --git a/app/api/v1/chat/ctrl/__init__.py b/app/api/v1/chat/ctrl/__init__.py
new file mode 100644
index 0000000..5350867
--- /dev/null
+++ b/app/api/v1/chat/ctrl/__init__.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : __init__.py
+# @Software : VSCode
+# @Datetime : 2023/03/20 23:15:18
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
diff --git a/app/api/v1/chat/ctrl/__pycache__/__init__.cpython-310.pyc b/app/api/v1/chat/ctrl/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..2b130bd
Binary files /dev/null and b/app/api/v1/chat/ctrl/__pycache__/__init__.cpython-310.pyc differ
diff --git a/app/api/v1/chat/ctrl/__pycache__/chat.cpython-310.pyc b/app/api/v1/chat/ctrl/__pycache__/chat.cpython-310.pyc
new file mode 100644
index 0000000..b003b71
Binary files /dev/null and b/app/api/v1/chat/ctrl/__pycache__/chat.cpython-310.pyc differ
diff --git a/app/api/v1/chat/ctrl/chat.py b/app/api/v1/chat/ctrl/chat.py
new file mode 100644
index 0000000..a3ebb31
--- /dev/null
+++ b/app/api/v1/chat/ctrl/chat.py
@@ -0,0 +1,17 @@
+import openai
+
+class CTRLChat():
+    '''
+    Controller for OpenAI API calls
+    '''
+    def chat(self, key: str, q: str, tokens_limit: int) -> str:
+        openai.api_key = key
+        completion = openai.ChatCompletion.create(
+            model="gpt-3.5-turbo",
+            messages=[
+                {"role": "user", "content": q}
+            ],
+            max_tokens=tokens_limit
+        )
+
+        return completion.choices[0].message.content
\ No newline at end of file
diff --git a/app/api/v1/chat/schemas/__pycache__/chat_schema.cpython-310.pyc b/app/api/v1/chat/schemas/__pycache__/chat_schema.cpython-310.pyc
new file mode 100644
index 0000000..3ca25fd
Binary files /dev/null and b/app/api/v1/chat/schemas/__pycache__/chat_schema.cpython-310.pyc differ
diff --git a/app/api/v1/chat/schemas/chat_schema.py b/app/api/v1/chat/schemas/chat_schema.py
new file mode 100644
index 0000000..8103849
--- /dev/null
+++ b/app/api/v1/chat/schemas/chat_schema.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : chat_schema.py
+# @Software : VSCode
+# @Datetime : 2023/03/20 23:31:03
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+from pydantic import BaseModel
+
+class Chat(BaseModel):
+    question: str
\ No newline at end of file
diff --git a/app/api/v1/chat/views.py b/app/api/v1/chat/views.py
new file mode 100644
index 0000000..0ab2fe9
--- /dev/null
+++ b/app/api/v1/chat/views.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : views.py
+# @Software : VSCode
+# @Datetime : 2023/03/20 15:56:34
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+from typing import Optional, Any
+from fastapi import APIRouter, Depends, Header
+from sqlalchemy.orm.session import Session
+
+from extensions import logger
+from core import security
+from utils import response_code, custom_exc
+from db.session import get_db
+from models import User, ChatHistory
+
+from .schemas import chat_schema
+from .ctrl.chat import CTRLChat
+from ..auth.crud.user import CRUDUser
+from .crud.chat import CRUDChat
+
+router = APIRouter()
+
+def get_current_user(
+    db: Session = Depends(get_db), token: Optional[str] = Header(None)
+) -> User:
+    """
+    Resolve the current user from the token header
+    :param db:
+    :param token:
+    :return:
+    """
+    if not token:
+        raise custom_exc.TokenAuthError(err_desc='headers not found token')
+
+    crud_user = CRUDUser(User)
+
+    token_data = security.check_jwt_token(token)
+    user = crud_user.get(db, id=token_data.sub)
+    if not user:
+        raise custom_exc.UserNotFound(err_desc="user not found")
+    return user
+
+@router.post("/chat/ask", summary="提问")
+async def ask(
+    *,
+    db: Session = Depends(get_db),
+    security: Optional[bool] = Depends(security.check_jwt_token),
+    current_user: User = Depends(get_current_user),
+    chat_info: chat_schema.Chat = None
+) -> Any:
+    if not chat_info:
+        return response_code.resp_200(data="", message="no chat info")
+
+    crud_chat = CRUDChat(ChatHistory)
+    ctrl_chat = CTRLChat()
+
+    q_times = crud_chat.get_chat_count(db, current_user)
+    q_limit_day = current_user.q_limit_day
+
+    if ((q_limit_day > 0 and q_times >= q_limit_day) or q_limit_day < 0):
+        return response_code.resp_200(data="您今天的提问次数已达到限额,欢迎您明日再来!", message="success chat with j-med")
+
+    openai_keys = crud_chat.get_openai_keys(db)
+    default_max_tokens = crud_chat.get_default_max_tokens(db)
+
+    if not openai_keys or len(openai_keys) <= 0:
+        return response_code.resp_200(data="", message="no openai_key defined")
+
+    key = openai_keys[0]
+    tokens_limit = current_user.tokens_limit
+
+    if tokens_limit == 0:
+        tokens_limit = default_max_tokens
+
+    answer = ctrl_chat.chat(key, chat_info.question, tokens_limit)
+
+    crud_chat.update_chat_count(db, current_user)
+    crud_chat.new_chat(db, current_user, chat_info.question, answer)
+
+    return response_code.resp_200(data=answer, message="success chat with j-med")
\ No newline at end of file
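# Editor's sketch (not part of this commit): end to end, the chat flow above
# is driven like this from a client. Host/port come from the logs below;
# credentials are placeholders, and the response envelope is assumed to be
# response_code.resp_200's {"code", "data", "message"} shape:
import requests

BASE = "http://127.0.0.1:8010/api/v1"

# 1. log in to obtain a JWT
r = requests.post(f"{BASE}/auth/login",
                  json={"username": "leo", "password": "secret"})
token = r.json()["data"]["token"]

# 2. ask a question; the token travels in a bare `token` header,
#    matching get_current_user(token: Optional[str] = Header(None))
r = requests.post(f"{BASE}/chat/ask",
                  headers={"token": token},
                  json={"question": "hello"})
print(r.json()["data"])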
diff --git a/app/core/__init__.py b/app/core/__init__.py
new file mode 100644
index 0000000..151e422
--- /dev/null
+++ b/app/core/__init__.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : __init__.py
+# @Software : VSCode
+# @Datetime : 2021/11/03 17:31:26
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
diff --git a/app/core/__pycache__/__init__.cpython-310.pyc b/app/core/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..b3ea2b2
Binary files /dev/null and b/app/core/__pycache__/__init__.cpython-310.pyc differ
diff --git a/app/core/__pycache__/__init__.cpython-39.pyc b/app/core/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..eb16dd4
Binary files /dev/null and b/app/core/__pycache__/__init__.cpython-39.pyc differ
diff --git a/app/core/__pycache__/security.cpython-310.pyc b/app/core/__pycache__/security.cpython-310.pyc
new file mode 100644
index 0000000..effaf66
Binary files /dev/null and b/app/core/__pycache__/security.cpython-310.pyc differ
diff --git a/app/core/__pycache__/security.cpython-39.pyc b/app/core/__pycache__/security.cpython-39.pyc
new file mode 100644
index 0000000..115d78d
Binary files /dev/null and b/app/core/__pycache__/security.cpython-39.pyc differ
diff --git a/app/core/security.py b/app/core/security.py
new file mode 100644
index 0000000..df32acf
--- /dev/null
+++ b/app/core/security.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : security.py
+# @Software : VSCode
+# @Datetime : 2021/11/03 17:33:20
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+"""
+
+Token and password verification
+
+pip install python-jose
+
+pip install passlib
+
+
+"""
+
+from datetime import datetime, timedelta
+from typing import Any, Union, Optional
+from fastapi import Depends, Header
+from jose import jwt
+from passlib.context import CryptContext
+from pydantic import ValidationError
+
+from core.settings import config
+from utils import custom_exc
+from api.v1.auth.schemas import token_schema
+
+pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+
+def create_access_token(
+    subject: Union[str, Any], expires_delta: timedelta = None
+) -> str:
+    if expires_delta:
+        expire = datetime.utcnow() + expires_delta
+    else:
+        expire = datetime.utcnow() + timedelta(
+            minutes=config.ACCESS_TOKEN_EXPIRE_MINUTES
+        )
+    to_encode = {"exp": expire, "sub": str(subject)}
+    encoded_jwt = jwt.encode(to_encode, config.SECRET_KEY, algorithm=config.JWT_ALGORITHM)
+    return encoded_jwt
+
+
+def verify_password(plain_password: str, hashed_password: str) -> bool:
+    return pwd_context.verify(plain_password, hashed_password)
+
+
+def get_password_hash(password: str) -> str:
+    return pwd_context.hash(password)
+
+def check_jwt_token(
+    token: Optional[str] = Header(None)
+) -> Union[str, Any]:
+    """
+    Only parse and verify the token
+    :param token:
+    :return:
+    """
+
+    try:
+        payload = jwt.decode(
+            token,
+            config.SECRET_KEY, algorithms=config.JWT_ALGORITHM
+        )
+        return token_schema.TokenPayload(**payload)
+    except (jwt.JWTError, ValidationError, AttributeError):
+        raise custom_exc.TokenAuthError(err_desc="access token fail")
\ No newline at end of file
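# Editor's sketch (not part of this commit): the token helpers above are a
# thin wrapper over python-jose. Their round trip, stripped to essentials;
# the key and algorithm mirror the development config defaults:
from datetime import datetime, timedelta
from jose import jwt

SECRET_KEY, ALG = "koelndom", "HS256"

claims = {"exp": datetime.utcnow() + timedelta(minutes=5), "sub": "42"}
token = jwt.encode(claims, SECRET_KEY, algorithm=ALG)     # create_access_token
payload = jwt.decode(token, SECRET_KEY, algorithms=ALG)   # check_jwt_token
assert payload["sub"] == "42"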
"rm-uf6d1660v738e34f6vo.mysql.rds.aliyuncs.com" + MYSQL_DATABASE: str = 'jmedchat' + + # Mysql地址 + SQLALCHEMY_DATABASE_URI = f"mysql+pymysql://{MYSQL_USERNAME}:{urlquote(MYSQL_PASSWORD)}@" \ + f"{MYSQL_HOST}/{MYSQL_DATABASE}?charset=utf8" + + os.environ["HTTP_PROXY"] = "http://127.0.0.1:7890" + os.environ["HTTPS_PROXY"] = "http://127.0.0.1:7890" + +config = Config() diff --git a/app/core/settings/production_config.py b/app/core/settings/production_config.py new file mode 100644 index 0000000..823f99f --- /dev/null +++ b/app/core/settings/production_config.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2020/6/16 10:15 +# @Author : CoderCharm +# @File : production_config.py +# @Software: PyCharm +# @Desc : +""" + +生产环境 + +我这种是一种方式,简单直观 + +还有一种是服务一个固定路径放一个配置文件如 /etc/conf 下 xxx.ini 或者 xxx.py文件 +然后项目默认读取 /etc/conf 目录下的配置文件,能读取则为生产环境, +读取不到则为开发环境,开发环境配置可以直接写在代码里面(或者配置ide环境变量) + +服务器上设置 ENV 环境变量 + + +""" +import os + +from typing import Union, Optional +from pydantic import AnyHttpUrl, BaseSettings, IPvAnyAddress + + +class Config(BaseSettings): + # 文档地址 成产环境可以关闭 None + DOCS_URL: Optional[str] = "/api/v1/docs" + # # 文档关联请求数据接口 成产环境可以关闭 None + OPENAPI_URL: Optional[str] = "/api/v1/openapi.json" + # 禁用 redoc 文档 + REDOC_URL: Optional[str] = None + + ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24 * 8 # 8 天 + SECRET_KEY: str = '-*&^)()sd(*A%&^aWEQaasda_asdasd*&*)(asd%$#' + + MYSQL_USERNAME: str = os.getenv("MYSQL_USER", "root") + MYSQL_PASSWORD: str = os.getenv("MYSQL_PASSWORD", "1qaz@WSX") + MYSQL_HOST: Union[AnyHttpUrl, IPvAnyAddress] = os.getenv("MYSQL_HOST", "120.55.81.57") + MYSQL_DATABASE: str = 'chat' + + # Mysql地址 + SQLALCHEMY_DATABASE_URI = f"mysql+pymysql://{MYSQL_USERNAME}:{MYSQL_PASSWORD}@" \ + f"{MYSQL_HOST}/{MYSQL_DATABASE}?charset=utf8mb4" + + +config = Config() diff --git a/app/db/__init__.py b/app/db/__init__.py new file mode 100644 index 0000000..d7b539b --- /dev/null +++ b/app/db/__init__.py @@ -0,0 +1,8 @@ +# !/usr/bin/env python3 +# -*- encoding : utf-8 -*- +# @Filename : __init__.py +# @Software : VSCode +# @Datetime : 2021/11/03 21:36:56 +# @Author : leo liu +# @Version : 1.0 +# @Description : diff --git a/app/db/__pycache__/__init__.cpython-310.pyc b/app/db/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..dda5ace Binary files /dev/null and b/app/db/__pycache__/__init__.cpython-310.pyc differ diff --git a/app/db/__pycache__/__init__.cpython-39.pyc b/app/db/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..70963b9 Binary files /dev/null and b/app/db/__pycache__/__init__.cpython-39.pyc differ diff --git a/app/db/__pycache__/base_class.cpython-310.pyc b/app/db/__pycache__/base_class.cpython-310.pyc new file mode 100644 index 0000000..f2bb49a Binary files /dev/null and b/app/db/__pycache__/base_class.cpython-310.pyc differ diff --git a/app/db/__pycache__/base_class.cpython-39.pyc b/app/db/__pycache__/base_class.cpython-39.pyc new file mode 100644 index 0000000..683ab0e Binary files /dev/null and b/app/db/__pycache__/base_class.cpython-39.pyc differ diff --git a/app/db/__pycache__/session.cpython-310.pyc b/app/db/__pycache__/session.cpython-310.pyc new file mode 100644 index 0000000..aa20180 Binary files /dev/null and b/app/db/__pycache__/session.cpython-310.pyc differ diff --git a/app/db/__pycache__/session.cpython-39.pyc b/app/db/__pycache__/session.cpython-39.pyc new file mode 100644 index 0000000..9ca4af2 Binary files /dev/null and b/app/db/__pycache__/session.cpython-39.pyc differ diff --git 
diff --git a/app/db/base_class.py b/app/db/base_class.py
new file mode 100644
index 0000000..dd23d6f
--- /dev/null
+++ b/app/db/base_class.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : base_class.py
+# @Software : VSCode
+# @Datetime : 2021/11/04 21:27:43
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+import uuid
+from datetime import datetime
+
+from sqlalchemy import Column, Integer, DateTime
+
+from sqlalchemy.sql import func
+from sqlalchemy.ext.declarative import as_declarative, declared_attr
+
+@as_declarative()
+class Base:
+    # common columns
+    id = Column(Integer, unique=True, index=True, primary_key=True, autoincrement=True)
+    create_time = Column(DateTime, default=datetime.now, server_default=func.now(), comment="创建时间")
+    update_time = Column(DateTime, default=datetime.now, onupdate=datetime.now, server_default=func.now(),
+                         server_onupdate=func.now(), comment="更新时间")
+    is_delete = Column(Integer, default=0, comment="逻辑删除:0=未删除,1=删除", server_default='0')
+    __name__: str
+
+    # Generate __tablename__ automatically
+    @declared_attr
+    def __tablename__(cls) -> str:
+        import re
+        # if __tablename__ is not set, derive the table name from the model class name
+        name_list = re.findall(r"[A-Z][a-z\d]*", cls.__name__)
+        # convert to snake_case, e.g. MallUser -> mall_user
+        return "_".join(name_list).lower()
+
+    # called as Model.getAttrFromName(Model, name), so `self` is the model class
+    def getAttrFromName(self, attrName):
+        if hasattr(self, attrName):
+            return getattr(self, attrName)
+        else:
+            return getattr(self, "id")
+
+
+def gen_uuid() -> str:
+    # generate a uuid
+    # https://stackoverflow.com/questions/183042/how-can-i-use-uuids-in-sqlalchemy?rq=1
+    return uuid.uuid4().hex
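# Editor's sketch (not part of this commit): the declared_attr above derives
# table names from class names. The regex split it uses behaves like this:
import re

def to_table_name(cls_name: str) -> str:
    name_list = re.findall(r"[A-Z][a-z\d]*", cls_name)
    return "_".join(name_list).lower()

assert to_table_name("MallUser") == "mall_user"  # the example from the comment
assert to_table_name("ChatCountDay") == "chat_count_day"
assert to_table_name("LoginHistory") == "login_history"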
diff --git a/app/db/session.py b/app/db/session.py
new file mode 100644
index 0000000..db2d9f9
--- /dev/null
+++ b/app/db/session.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : session.py
+# @Software : VSCode
+# @Datetime : 2021/11/04 15:49:56
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+from typing import Generator
+
+from sqlalchemy import create_engine
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker
+
+from core.settings import config
+
+engine = create_engine(
+    config.SQLALCHEMY_DATABASE_URI
+)
+
+SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
+
+Base = declarative_base()
+
+def get_db() -> Generator:
+    try:
+        db = SessionLocal()
+        yield db
+    finally:
+        db.close()
\ No newline at end of file
diff --git a/app/extensions/__init__.py b/app/extensions/__init__.py
new file mode 100644
index 0000000..d87f0e5
--- /dev/null
+++ b/app/extensions/__init__.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Filename : __init__.py
+# @Software : VSCode
+# @Datetime : 2021/11/03 17:17:10
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+"""
+
+Global objects for the extension modules
+
+"""
+
+
+from .logger import logger
\ No newline at end of file
diff --git a/app/extensions/__pycache__/__init__.cpython-310.pyc b/app/extensions/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..c4a7c20
Binary files /dev/null and b/app/extensions/__pycache__/__init__.cpython-310.pyc differ
diff --git a/app/extensions/__pycache__/__init__.cpython-39.pyc b/app/extensions/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..6e13ce9
Binary files /dev/null and b/app/extensions/__pycache__/__init__.cpython-39.pyc differ
diff --git a/app/extensions/__pycache__/curd_base.cpython-310.pyc b/app/extensions/__pycache__/curd_base.cpython-310.pyc
new file mode 100644
index 0000000..d421a52
Binary files /dev/null and b/app/extensions/__pycache__/curd_base.cpython-310.pyc differ
diff --git a/app/extensions/__pycache__/curd_base.cpython-39.pyc b/app/extensions/__pycache__/curd_base.cpython-39.pyc
new file mode 100644
index 0000000..a46c13b
Binary files /dev/null and b/app/extensions/__pycache__/curd_base.cpython-39.pyc differ
diff --git a/app/extensions/__pycache__/logger.cpython-310.pyc b/app/extensions/__pycache__/logger.cpython-310.pyc
new file mode 100644
index 0000000..65d8acb
Binary files /dev/null and b/app/extensions/__pycache__/logger.cpython-310.pyc differ
diff --git a/app/extensions/__pycache__/logger.cpython-39.pyc b/app/extensions/__pycache__/logger.cpython-39.pyc
new file mode 100644
index 0000000..edc6d0a
Binary files /dev/null and b/app/extensions/__pycache__/logger.cpython-39.pyc differ
diff --git a/app/extensions/__pycache__/schemas_base.cpython-39.pyc b/app/extensions/__pycache__/schemas_base.cpython-39.pyc
new file mode 100644
index 0000000..086ee92
Binary files /dev/null and b/app/extensions/__pycache__/schemas_base.cpython-39.pyc differ
diff --git a/app/extensions/curd_base.py b/app/extensions/curd_base.py
new file mode 100644
index 0000000..88952b6
--- /dev/null
+++ b/app/extensions/curd_base.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2020/7/7 14:08
+# @Author : CoderCharm
+# @File : curd_base.py
+# @Software: PyCharm
+# @Desc :
+"""
+
+"""
+
+from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union
+
+from fastapi.encoders import jsonable_encoder
+from pydantic import BaseModel
+from sqlalchemy.orm import Session
+
+from db.base_class import Base
+
+ModelType = TypeVar("ModelType", bound=Base)
+CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
+UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)
+
+
+class CRUDBase(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
+    def __init__(self, model: Type[ModelType]):
+        """
+        CRUD object with default methods to Create, Read, Update, Delete (CRUD).
diff --git a/app/extensions/logger.py b/app/extensions/logger.py
new file mode 100644
index 0000000..a367c9a
--- /dev/null
+++ b/app/extensions/logger.py
@@ -0,0 +1,38 @@
+# !/usr/bin/env python3
+# -*- encoding : utf-8 -*-
+# @Filename : logger.py
+# @Software : VSCode
+# @Datetime : 2021/11/03 17:17:17
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+"""
+
+Log file configuration
+
+# The original idea was to hang the logger off the app object, Flask-style;
+# the FastAPI author recommends just using a global object instead
+https://github.com/tiangolo/fastapi/issues/81#issuecomment-473677039
+
+"""
+
+import os
+import time
+from loguru import logger
+
+basedir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# Locate the log directory, creating it on first run
+log_path = os.path.join(basedir, 'logs')
+
+if not os.path.exists(log_path):
+    os.mkdir(log_path)
+
+log_path = os.path.join(log_path, f'{time.strftime("%Y-%m-%d")}.log')
+
+# Basic configuration: rotate at 12:00 each day, keep files for 5 days;
+# enqueue=True makes writes safe across threads and processes
+logger.add(log_path, rotation="12:00", retention="5 days", enqueue=True)
+
+
+__all__ = ["logger"]
+
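Since logger is a plain module-level loguru instance, call sites simply import it; a minimal sketch (the messages are illustrative):

from extensions import logger

logger.info("service started")        # goes to stderr and to logs/<YYYY-MM-DD>.log
logger.error("something went wrong")  # same sinks

One design consequence worth noting: the date in the file name is fixed when the module is imported, so a long-running process keeps writing to the same file and relies on the 12:00 rotation rather than the name changing at midnight.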
diff --git a/app/logs/2023-03-20.log b/app/logs/2023-03-20.log new file mode 100644 index 0000000..cc783b9 --- /dev/null +++ b/app/logs/2023-03-20.log @@ -0,0 +1,1000 @@ +2023-03-20 15:53:39.581 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 15:53:42.328 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 16:14:52.568 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 16:14:52.921 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 16:15:37.972 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 16:15:37.976 | ERROR | api:token_exception_handler:76 - 参数查询异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '0', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/app/core/security.py", line 65, in check_jwt_token + payload = jwt.decode( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/jose/jwt.py", line 142, in decode + payload = jws.verify(token, key, algorithms, verify=verify_signature) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/jose/jws.py", line 70, in verify + header, payload, signing_input, signature = _load(token) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/jose/jws.py", line 176, in _load + signing_input, crypto_segment = jwt.rsplit(b".", 1) +AttributeError: 'NoneType' object has no attribute 'rsplit' + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 217, in app + solved_result = await solve_dependencies( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py", line 529, in solve_dependencies + solved = await run_in_threadpool(call, **sub_values) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/concurrency.py", line 39, in run_in_threadpool + return await anyio.to_thread.run_sync(func, *args) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/to_thread.py", line 28, in run_sync + return await get_asynclib().run_sync_in_worker_thread(func, *args, cancellable=cancellable, + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 818, in 
run_sync_in_worker_thread + return await future + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 754, in run + result = context.run(func, *args) + File "/home/leo/Work/openai/JmedChat/app/core/security.py", line 71, in check_jwt_token + raise custom_exc.TokenAuthError(err_desc="access token fail") +utils.custom_exc.TokenAuthError + +2023-03-20 16:42:30.705 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 16:42:31.190 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 17:04:06.353 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 17:04:10.165 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 17:13:33.202 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 17:13:33.547 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/auth/login +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '89', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 1156, in read + first_packet = self.connection._read_packet() + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +pymysql.err.ProgrammingError: (1146, "Table 'jmedchat.nlt_user' doesn't exist") + +The above exception was the direct cause of the following exception: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/views.py", line 58, in auth_login + user = crud_user.authenticate(db, username=user_info.username, password=user_info.password) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/crud/user.py", line 117, in authenticate + user = self.get_by_username(db, 
username=username) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/crud/user.py", line 114, in get_by_username + return db.query(User).filter(User.username == username).first() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/query.py", line 2819, in first + return self.limit(1)._iter().first() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/query.py", line 2903, in _iter + result = self.session.execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 1696, in execute + result = conn._execute_20(statement, params or {}, execution_options) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1631, in _execute_20 + return meth(self, args_10style, kwargs_10style, execution_options) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/sql/elements.py", line 325, in _execute_on_connection + return connection._execute_clauseelement( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1498, in _execute_clauseelement + ret = self._execute_context( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1862, in _execute_context + self._handle_dbapi_exception( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 2043, in _handle_dbapi_exception + util.raise_( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/compat.py", line 207, in raise_ + raise exception + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 1156, in read + first_packet = self.connection._read_packet() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +sqlalchemy.exc.ProgrammingError: (pymysql.err.ProgrammingError) (1146, "Table 'jmedchat.nlt_user' doesn't exist") +[SQL: SELECT nlt_user.id AS nlt_user_id, nlt_user.create_time AS nlt_user_create_time, nlt_user.update_time AS nlt_user_update_time, nlt_user.is_delete AS 
nlt_user_is_delete, nlt_user.user_id AS nlt_user_user_id, nlt_user.username AS nlt_user_username, nlt_user.hashed_password AS nlt_user_hashed_password, nlt_user.nickname AS nlt_user_nickname, nlt_user.email AS nlt_user_email, nlt_user.avatar AS nlt_user_avatar, nlt_user.phone AS nlt_user_phone, nlt_user.gender AS nlt_user_gender, nlt_user.register_time AS nlt_user_register_time, nlt_user.last_login_time AS nlt_user_last_login_time, nlt_user.last_login_ip AS nlt_user_last_login_ip, nlt_user.register_ip AS nlt_user_register_ip, nlt_user.wechat_openid AS nlt_user_wechat_openid, nlt_user.country AS nlt_user_country, nlt_user.province AS nlt_user_province, nlt_user.city AS nlt_user_city, nlt_user.q_limit AS nlt_user_q_limit, nlt_user.q_tokens_limit AS nlt_user_q_tokens_limit, nlt_user.a_tokens_limit AS nlt_user_a_tokens_limit, nlt_user.is_active AS nlt_user_is_active +FROM nlt_user +WHERE nlt_user.username = %(username_1)s + LIMIT %(param_1)s] +[parameters: {'username_1': 'string', 'param_1': 1}] +(Background on this error at: https://sqlalche.me/e/14/f405) + +2023-03-20 17:59:57.866 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 17:59:58.274 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:00:11.545 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:00:11.689 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:01:12.295 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:01:12.425 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:01:59.418 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:01:59.650 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:06:08.568 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:06:08.864 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:06:53.485 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser 
+headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:06:53.489 | ERROR | api:validation_exception_handler:91 - 参数错误 +URL:http://127.0.0.1:8010/api/v1/auth/newuser +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '96', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 225, in app + raise RequestValidationError(errors, body=body) +fastapi.exceptions.RequestValidationError: 1 validation error for Request +body -> email + field required (type=value_error.missing) + +2023-03-20 18:07:24.938 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:07:24.945 | ERROR | api:validation_exception_handler:91 - 参数错误 +URL:http://127.0.0.1:8010/api/v1/auth/newuser +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '96', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 
21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 225, in app + raise RequestValidationError(errors, body=body) +fastapi.exceptions.RequestValidationError: 1 validation error for Request +body -> email + field required (type=value_error.missing) + +2023-03-20 18:08:29.920 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:08:30.211 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:10:02.389 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:10:02.721 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:10:09.231 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:10:09.248 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/auth/newuser +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '57', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await 
recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/views.py", line 179, in new_user + if (crud_user.is_exist(db, user_in=user)): + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/crud/user.py", line 146, in is_exist + user = db.query(User).filter(User.email == user_in.email or User.username == user_in.username).first() +AttributeError: 'UserCreate' object has no attribute 'email' + +2023-03-20 18:10:43.486 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:10:43.737 | INFO | api:logger_request:140 - 访问记录:GET 
url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:10:51.850 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:10:51.866 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/auth/newuser +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '57', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, 
receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/views.py", line 179, in new_user + if (crud_user.is_exist(db, user_in=user)): + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/crud/user.py", line 146, in is_exist + user = db.query(User).filter(User.email == user_in.email or User.username == user_in.username).first() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/sql/elements.py", line 3901, in __bool__ + raise TypeError("Boolean value of this clause is not defined") +TypeError: Boolean value of this clause is not defined +
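The traceback above pinpoints an actual bug in the application code rather than bad input: at app/api/v1/auth/crud/user.py line 146, Python's `or` is applied to two SQLAlchemy column comparisons, and a SQL clause object deliberately refuses to be coerced to a boolean. A minimal sketch of the usual fix, building the OR clause with sqlalchemy.or_ (the User model and user_in schema names are taken from the traceback itself):

from sqlalchemy import or_

# inside is_exist(): let SQLAlchemy build the OR instead of Python evaluating it
user = db.query(User).filter(
    or_(User.email == user_in.email, User.username == user_in.username)
).first()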
+2023-03-20 18:12:05.718 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:12:05.899 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:12:10.593 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:12:11.071 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/auth/newuser +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '57', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 1156, in read + first_packet = self.connection._read_packet() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +pymysql.err.IntegrityError: (1048, "Column 'email' cannot be null") + +The above exception was the direct cause of the following exception: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/views.py", line 183, in new_user + user = crud_user.create(db, obj_in=user) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/crud/user.py", line 174, in create + db.commit() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 1435, in commit + self._transaction.commit(_to_root=self.future) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 829, in commit + self._prepare_impl() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 808, in _prepare_impl + self.session.flush() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 3367, in flush + self._flush(objects) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 3506, in _flush + with util.safe_reraise(): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/langhelpers.py", line 70, in __exit__ + compat.raise_( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/compat.py", line 207, in raise_ + raise exception + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 3467, in _flush + flush_context.execute() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/unitofwork.py", line 456, in execute + rec.execute(self) + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/views.py", line 183, in new_user + user = crud_user.create(db, obj_in=user) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/crud/user.py", line 174, in create + db.commit() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 1435, in commit + self._transaction.commit(_to_root=self.future) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 829, in commit + self._prepare_impl() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 808, in _prepare_impl + self.session.flush() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 3367, in flush + self._flush(objects) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 3506, in _flush + with util.safe_reraise(): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/langhelpers.py", line 70, in __exit__ + compat.raise_( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/compat.py", line 207, in raise_ + raise exception + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 3467, in _flush + flush_context.execute() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/unitofwork.py", line 456, in execute + rec.execute(self) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/unitofwork.py", line 630, in execute + util.preloaded.orm_persistence.save_obj( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/persistence.py", line 245, in save_obj + _emit_insert_statements( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/persistence.py", line 1238, in _emit_insert_statements + result = connection._execute_20( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1631, in _execute_20 + return meth(self, args_10style, kwargs_10style, execution_options) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/sql/elements.py", line 325, in _execute_on_connection + return connection._execute_clauseelement( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1498, in _execute_clauseelement + ret = self._execute_context( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1862, in _execute_context + self._handle_dbapi_exception( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 2043, in _handle_dbapi_exception + util.raise_( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/compat.py", line 207, in raise_ + raise exception + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 1156, in read + first_packet = self.connection._read_packet() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +sqlalchemy.exc.IntegrityError: (pymysql.err.IntegrityError) (1048, "Column 'email' cannot be null") +[SQL: INSERT INTO nlt_user (create_time, update_time, is_delete, user_id, username, hashed_password, nickname, email, avatar, phone, gender, register_time, last_login_time, last_login_ip, register_ip, wechat_openid, country, province, city, q_limit, q_tokens_limit, a_tokens_limit, is_active) VALUES (%(create_time)s, %(update_time)s, %(is_delete)s, %(user_id)s, %(username)s, %(hashed_password)s, %(nickname)s, %(email)s, %(avatar)s, %(phone)s, %(gender)s, %(register_time)s, %(last_login_time)s, %(last_login_ip)s, %(register_ip)s, %(wechat_openid)s, %(country)s, %(province)s, %(city)s, %(q_limit)s, %(q_tokens_limit)s, %(a_tokens_limit)s, %(is_active)s)] +[parameters: {'create_time': datetime.datetime(2023, 3, 20, 18, 12, 10, 937550), 'update_time': datetime.datetime(2023, 3, 20, 18, 12, 10, 937562), 'is_delete': 0, 'user_id': '034c889747ef4ec0aabe6aba170cb8e6', 'username': '13917323763', 'hashed_password': '$2b$12$JodO4yYFyuqUW42fIi6pu.2heV63qyrfi7I6dJbHU9dr4oVnmePNW', 'nickname': None, 'email': None, 'avatar': None, 'phone': None, 'gender': 0, 'register_time': datetime.datetime(2023, 3, 20, 18, 12, 10, 937603), 'last_login_time': datetime.datetime(2023, 3, 20, 18, 12, 10, 937606), 'last_login_ip': None, 'register_ip': None, 'wechat_openid': None, 'country': None, 'province': None, 'city': None, 'q_limit': 0, 'q_tokens_limit': 0, 'a_tokens_limit': 0, 'is_active': True}] +(Background on this error at: https://sqlalche.me/e/14/gkpj) + +2023-03-20 18:12:59.667 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:12:59.845 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:13:04.199 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:13:04.658 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/auth/newuser +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '57', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 1156, in read + first_packet = self.connection._read_packet() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +pymysql.err.IntegrityError: (1048, "Column 'email' cannot be null") + +The above exception was the direct cause of the following exception: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/views.py", line 183, in new_user + user = crud_user.create(db, obj_in=user) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/crud/user.py", line 174, in create + db.commit() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 1435, in commit + self._transaction.commit(_to_root=self.future) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 829, in commit + self._prepare_impl() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 808, in _prepare_impl + self.session.flush() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 3367, in flush + self._flush(objects) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 3506, in _flush + with util.safe_reraise(): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/langhelpers.py", line 70, in __exit__ + compat.raise_( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/compat.py", line 207, in raise_ + raise exception + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 3467, in _flush + flush_context.execute() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/unitofwork.py", line 456, in execute + rec.execute(self) + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/unitofwork.py", line 630, in execute + util.preloaded.orm_persistence.save_obj( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/persistence.py", line 245, in save_obj + _emit_insert_statements( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/persistence.py", line 1238, in _emit_insert_statements + result = connection._execute_20( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1631, in _execute_20 + return meth(self, args_10style, kwargs_10style, execution_options) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/sql/elements.py", line 325, in _execute_on_connection + return connection._execute_clauseelement( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1498, in _execute_clauseelement + ret = self._execute_context( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1862, in _execute_context + self._handle_dbapi_exception( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 2043, in _handle_dbapi_exception + util.raise_( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/compat.py", line 207, in raise_ + raise exception + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 1156, in read + first_packet = self.connection._read_packet() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +sqlalchemy.exc.IntegrityError: (pymysql.err.IntegrityError) (1048, "Column 'email' cannot be null") +[SQL: INSERT INTO nlt_user (create_time, update_time, is_delete, user_id, username, hashed_password, nickname, email, avatar, phone, gender, register_time, last_login_time, last_login_ip, register_ip, wechat_openid, country, province, city, q_limit, q_tokens_limit, a_tokens_limit, is_active) VALUES (%(create_time)s, %(update_time)s, %(is_delete)s, %(user_id)s, %(username)s, 
%(hashed_password)s, %(nickname)s, %(email)s, %(avatar)s, %(phone)s, %(gender)s, %(register_time)s, %(last_login_time)s, %(last_login_ip)s, %(register_ip)s, %(wechat_openid)s, %(country)s, %(province)s, %(city)s, %(q_limit)s, %(q_tokens_limit)s, %(a_tokens_limit)s, %(is_active)s)] +[parameters: {'create_time': datetime.datetime(2023, 3, 20, 18, 13, 4, 537499), 'update_time': datetime.datetime(2023, 3, 20, 18, 13, 4, 537511), 'is_delete': 0, 'user_id': '59d910dcf683487f83b41400518b30ce', 'username': '13917323763', 'hashed_password': '$2b$12$R6.EFXvvZwNojzOqI/Rgt.wKT.Y9QPvEh0ovL2mwKA1bzmcgxpxeG', 'nickname': None, 'email': None, 'avatar': None, 'phone': None, 'gender': 0, 'register_time': datetime.datetime(2023, 3, 20, 18, 13, 4, 537554), 'last_login_time': datetime.datetime(2023, 3, 20, 18, 13, 4, 537557), 'last_login_ip': None, 'register_ip': None, 'wechat_openid': None, 'country': None, 'province': None, 'city': None, 'q_limit': 0, 'q_tokens_limit': 0, 'a_tokens_limit': 0, 'is_active': True}] +(Background on this error at: https://sqlalche.me/e/14/gkpj) + +2023-03-20 18:14:50.216 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:14:50.485 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:14:56.666 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:14:57.192 | INFO | api.v1.auth.views:new_user:186 - 创建新用户成功->帐号:13917323763 +2023-03-20 18:15:23.005 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:15:23.452 | INFO | api.v1.auth.views:auth_login:69 - 用户[13917323763]登录成功 +2023-03-20 18:15:46.825 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:15:47.042 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:16:03.881 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:16:04.412 | INFO | api.v1.auth.views:auth_login:69 - 用户[13917323763]登录成功 +2023-03-20 18:16:42.176 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 18:16:42.611 | INFO | api.v1.auth.views:new_user:186 - 创建新用户成功->帐号:18918045317 +2023-03-20 22:21:42.569 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 
+IP:127.0.0.1 +2023-03-20 22:21:42.807 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:22:04.655 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:22:04.816 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:30:47.735 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:30:48.018 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:31:18.599 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:31:18.639 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/auth/newuser +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '80', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/relationships.py", line 2745, in _determine_joins + self.primaryjoin = join_condition( + File "<string>", line 2, in join_condition + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/sql/selectable.py", line 1229, in _join_condition + raise exc.NoForeignKeysError( +sqlalchemy.exc.NoForeignKeysError: Can't find any foreign key relationships between 'nlt_chat_limit' and 'nlt_user'.
+ +The above exception was the direct cause of the following exception: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/views.py", line 179, in new_user + if (crud_user.is_exist(db, user_in=user)): + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/crud/user.py", line 146, in is_exist + user = db.query(User).filter(User.username == user_in.username).first() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 2143, in query + return self._query_cls(entities, self, **kwargs) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/query.py", line 180, in __init__ + self._set_entities(entities) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/query.py", line 187, in _set_entities + self._raw_columns = [ + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/query.py", line 
188, in <listcomp> + coercions.expect( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/sql/coercions.py", line 181, in expect + insp._post_inspect + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/langhelpers.py", line 1180, in __get__ + obj.__dict__[self.__name__] = result = self.fget(obj) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/mapper.py", line 2119, in _post_inspect + self._check_configure() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/mapper.py", line 1896, in _check_configure + _configure_registries({self.registry}, cascade=True) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/mapper.py", line 3420, in _configure_registries + _do_configure_registries(registries, cascade) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/mapper.py", line 3459, in _do_configure_registries + mapper._post_configure_properties() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/mapper.py", line 1913, in _post_configure_properties + prop.init() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/interfaces.py", line 231, in init + self.do_init() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/relationships.py", line 2143, in do_init + self._setup_join_conditions() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/relationships.py", line 2239, in _setup_join_conditions + self._join_condition = jc = JoinCondition( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/relationships.py", line 2634, in __init__ + self._determine_joins() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/relationships.py", line 2767, in _determine_joins + util.raise_( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/compat.py", line 207, in raise_ + raise exception +sqlalchemy.exc.NoForeignKeysError: Could not determine join condition between parent/child tables on relationship ChatLimit.users - there are no foreign keys linking these tables. Ensure that referencing columns are associated with a ForeignKey or ForeignKeyConstraint, or specify a 'primaryjoin' expression.
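
The NoForeignKeysError above fires at mapper-configuration time, on the first query after the models are imported: the relationship ChatLimit.users gives SQLAlchemy no ForeignKey linking nlt_chat_limit to nlt_user, so it cannot infer a join condition. A minimal sketch of the usual fix follows; only the two table names and the ChatLimit.users relationship are taken from the traceback, and every column definition here is an illustrative assumption, not the repo's actual model.

# sketch only -- the real models live elsewhere in this repo
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class User(Base):
    __tablename__ = "nlt_user"
    id = Column(Integer, primary_key=True)
    user_id = Column(String(32), unique=True, index=True)

class ChatLimit(Base):
    __tablename__ = "nlt_chat_limit"
    id = Column(Integer, primary_key=True)
    # This ForeignKey is exactly what the error reports as missing: without
    # it, relationship() cannot infer how to join the two tables and raises
    # NoForeignKeysError the first time any mapper is exercised.
    user_id = Column(String(32), ForeignKey("nlt_user.user_id"))
    users = relationship("User", backref="chat_limit")

As the error message itself notes, passing an explicit primaryjoin= expression to relationship() is the alternative when adding a real foreign key constraint to the table is not an option.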
+ +2023-03-20 22:41:40.955 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:41:41.258 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:41:46.405 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:41:46.777 | INFO | api.v1.auth.views:new_user:186 - 创建新用户成功->帐号:13917323763 +2023-03-20 22:48:08.779 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:48:08.821 | INFO | api.v1.auth.views:new_user:180 - 创建新用户失败,用户已存在->帐号:13917323763 +2023-03-20 22:48:26.689 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:48:26.989 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:48:45.615 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:48:45.637 | INFO | api.v1.auth.views:new_user:180 - 创建新用户失败,用户已存在->帐号:13917323763 +2023-03-20 22:49:12.768 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:49:12.952 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:49:18.463 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:49:18.508 | INFO | api.v1.auth.views:new_user:180 - 创建新用户失败,用户已存在->帐号:13917323763 +2023-03-20 22:51:02.317 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:51:02.491 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:51:07.870 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:51:08.186 | INFO | api.v1.auth.views:new_user:186 - 创建新用户成功->帐号:13917323763 
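
A note on the IntegrityError logged earlier in this file, (1048, "Column 'email' cannot be null"): the INSERT parameters show email=None while the MySQL column is NOT NULL, so either the column must accept NULL or the registration schema must require the field. A minimal sketch of the first option; only the nlt_user table name and the username/email columns appear in the log, and the types and lengths below are illustrative assumptions.

from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = "nlt_user"
    id = Column(Integer, primary_key=True)
    username = Column(String(64), unique=True, nullable=False)
    # The failing INSERT sent email=None into a NOT NULL column; declaring
    # the column nullable here (and altering the live table to match, e.g.
    # ALTER TABLE nlt_user MODIFY email VARCHAR(128) NULL) makes the
    # optional field legal at the database level.
    email = Column(String(128), nullable=True)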
+2023-03-20 22:52:21.283 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:52:21.642 | INFO | api.v1.auth.views:new_user:186 - 创建新用户成功->帐号:111111 +2023-03-20 22:52:39.836 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:52:40.188 | INFO | api.v1.auth.views:new_user:186 - 创建新用户成功->帐号:123456 +2023-03-20 22:59:39.149 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:59:39.402 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:59:43.753 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 22:59:44.107 | INFO | api.v1.auth.views:new_user:186 - 创建新用户成功->帐号:123456 +2023-03-20 23:40:38.721 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 23:41:10.661 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/newuser +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 23:41:10.989 | INFO | api.v1.auth.views:new_user:186 - 创建新用户成功->帐号:13917323763 +2023-03-20 23:41:17.015 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 23:41:17.327 | INFO | api.v1.auth.views:auth_login:69 - 用户[13917323763]登录成功 +2023-03-20 23:41:31.678 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 23:41:31.948 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 23:41:53.845 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 23:42:39.386 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 23:43:27.446 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 23:43:27.459 | ERROR 
| api:token_exception_handler:76 - 参数查询异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '62', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/app/core/security.py", line 65, in check_jwt_token + payload = jwt.decode( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/jose/jwt.py", line 142, in decode + payload = jws.verify(token, key, algorithms, verify=verify_signature) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/jose/jws.py", line 70, in verify + header, payload, signing_input, signature = _load(token) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/jose/jws.py", line 176, in _load + signing_input, crypto_segment = jwt.rsplit(b".", 1) +AttributeError: 'NoneType' object has no attribute 'rsplit' + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 217, in app + solved_result = await solve_dependencies( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py", line 529, in solve_dependencies + solved = await run_in_threadpool(call, **sub_values) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/concurrency.py", line 39, in run_in_threadpool + return await anyio.to_thread.run_sync(func, *args) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/to_thread.py", line 28, in run_sync + return await get_asynclib().run_sync_in_worker_thread(func, *args, cancellable=cancellable, + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 818, in run_sync_in_worker_thread + return await future + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 754, in run + result 
= context.run(func, *args) + File "/home/leo/Work/openai/JmedChat/app/core/security.py", line 71, in check_jwt_token + raise custom_exc.TokenAuthError(err_desc="access token fail") +utils.custom_exc.TokenAuthError + +2023-03-20 23:43:31.330 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-20 23:43:31.332 | ERROR | api:token_exception_handler:76 - 参数查询异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '62', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/app/core/security.py", line 65, in check_jwt_token + payload = jwt.decode( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/jose/jwt.py", line 142, in decode + payload = jws.verify(token, key, algorithms, verify=verify_signature) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/jose/jws.py", line 70, in verify + header, payload, signing_input, signature = _load(token) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/jose/jws.py", line 176, in _load + signing_input, crypto_segment = jwt.rsplit(b".", 1) +AttributeError: 'NoneType' object has no attribute 'rsplit' + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 217, in app + solved_result = await solve_dependencies( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py", line 529, in solve_dependencies + solved = await run_in_threadpool(call, **sub_values) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/concurrency.py", line 39, in run_in_threadpool + return await anyio.to_thread.run_sync(func, *args) + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/to_thread.py", line 28, in run_sync + return await get_asynclib().run_sync_in_worker_thread(func, *args, cancellable=cancellable, + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 818, in run_sync_in_worker_thread + return await future + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 754, in run + result = context.run(func, *args) + File "/home/leo/Work/openai/JmedChat/app/core/security.py", line 71, in check_jwt_token + raise custom_exc.TokenAuthError(err_desc="access token fail") +utils.custom_exc.TokenAuthError + diff --git a/app/logs/2023-03-21.2023-03-21_10-50-17_219133.log b/app/logs/2023-03-21.2023-03-21_10-50-17_219133.log new file mode 100644 index 0000000..e69de29 diff --git a/app/logs/2023-03-21.log b/app/logs/2023-03-21.log new file mode 100644 index 0000000..b17ac1c --- /dev/null +++ b/app/logs/2023-03-21.log @@ -0,0 +1,118 @@ +2023-03-21 23:49:19.295 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-21 23:49:32.470 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-21 23:49:49.064 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-21 23:49:49.439 | INFO | api.v1.auth.views:auth_login:69 - 用户[13917323763]登录成功 +2023-03-21 23:50:07.567 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-21 23:50:07.640 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '26', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAxMDQ5ODksInN1YiI6IjIifQ.Klb_gwMvZ4vHMLhs7VgS5K_fvg55h3DMkyfF7SdO4lM', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connection.py", line 174, in _new_conn + conn = connection.create_connection( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/util/connection.py", line 95, in create_connection + raise err + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/util/connection.py", line 85, in create_connection + sock.connect(sa) +ConnectionRefusedError: [Errno 111] Connection refused + +During 
handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connectionpool.py", line 700, in urlopen + self._prepare_proxy(conn) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connectionpool.py", line 996, in _prepare_proxy + conn.connect() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connection.py", line 363, in connect + self.sock = conn = self._new_conn() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connection.py", line 186, in _new_conn + raise NewConnectionError( +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/requests/adapters.py", line 489, in send + resp = conn.urlopen( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connectionpool.py", line 815, in urlopen + return self.urlopen( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connectionpool.py", line 815, in urlopen + return self.urlopen( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connectionpool.py", line 787, in urlopen + retries = retries.increment( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/util/retry.py", line 592, in increment + raise MaxRetryError(_pool, url, error or ResponseError(cause)) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='api.openai.com', port=443): Max retries exceeded with url: /v1/chat/completions (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_requestor.py", line 516, in request_raw + result = _thread_context.session.request( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/requests/sessions.py", line 587, in request + resp = self.send(prep, **send_kwargs) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/requests/sessions.py", line 701, in send + r = adapter.send(request, **kwargs) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/requests/adapters.py", line 559, in send + raise ProxyError(e, request=request) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='api.openai.com', port=443): Max retries exceeded with url: /v1/chat/completions (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) + +The above exception was the direct cause of the following exception: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response 
= await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 53, in ask + answer = chat(chat_info.question) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/ctrl/chat.py", line 5, in chat + completion = openai.ChatCompletion.create( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_resources/chat_completion.py", line 25, in create + return super().create(*args, **kwargs) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 153, in create + response, _, api_key = requestor.request( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_requestor.py", line 216, in request + result = self.request_raw( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_requestor.py", line 528, in request_raw + raise error.APIConnectionError( +openai.error.APIConnectionError: Error communicating with OpenAI: HTTPSConnectionPool(host='api.openai.com', port=443): Max retries exceeded with url: /v1/chat/completions (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) + +2023-03-21 23:50:27.269 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 diff --git a/app/logs/2023-03-22.log b/app/logs/2023-03-22.log new file mode 100644 index 0000000..e69de29 diff --git a/app/logs/2023-03-23.2023-03-23_10-21-17_948127.log b/app/logs/2023-03-23.2023-03-23_10-21-17_948127.log new file mode 100644 index 0000000..bc16d01 --- /dev/null +++ b/app/logs/2023-03-23.2023-03-23_10-21-17_948127.log @@ -0,0 +1,376 @@ +2023-03-23 10:22:36.315 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 10:22:44.876 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 10:23:42.389 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 10:23:42.608 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/auth/login +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '57', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 1156, in read + first_packet = self.connection._read_packet() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +pymysql.err.OperationalError: (1054, 
"Unknown column 'nlt_user.q_limit_day' in 'field list'") + +The above exception was the direct cause of the following exception: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/views.py", line 58, in auth_login + user = crud_user.authenticate(db, username=user_info.username, password=user_info.password) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/crud/user.py", line 116, in authenticate + user = self.get_by_username(db, username=username) + File "/home/leo/Work/openai/JmedChat/app/api/v1/auth/crud/user.py", line 113, in get_by_username + return db.query(User).filter(User.username == username).first() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/query.py", line 2819, in first + return self.limit(1)._iter().first() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/query.py", line 2903, in _iter + result = self.session.execute( + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 1696, in execute + result = conn._execute_20(statement, params or {}, execution_options) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1631, in _execute_20 + return meth(self, args_10style, kwargs_10style, execution_options) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/sql/elements.py", line 325, in _execute_on_connection + return connection._execute_clauseelement( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1498, in _execute_clauseelement + ret = self._execute_context( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1862, in _execute_context + self._handle_dbapi_exception( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 2043, in _handle_dbapi_exception + util.raise_( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/compat.py", line 207, in raise_ + raise exception + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 1156, in read + first_packet = self.connection._read_packet() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +sqlalchemy.exc.OperationalError: (pymysql.err.OperationalError) (1054, "Unknown column 'nlt_user.q_limit_day' in 'field list'") +[SQL: SELECT nlt_user.id AS nlt_user_id, nlt_user.create_time AS nlt_user_create_time, nlt_user.update_time AS nlt_user_update_time, nlt_user.is_delete AS nlt_user_is_delete, nlt_user.user_id AS nlt_user_user_id, nlt_user.username AS nlt_user_username, nlt_user.hashed_password AS nlt_user_hashed_password, nlt_user.nickname AS nlt_user_nickname, nlt_user.email AS nlt_user_email, nlt_user.avatar AS nlt_user_avatar, nlt_user.phone AS nlt_user_phone, nlt_user.gender AS nlt_user_gender, nlt_user.register_time AS nlt_user_register_time, nlt_user.last_login_time AS nlt_user_last_login_time, nlt_user.last_login_ip AS nlt_user_last_login_ip, 
nlt_user.register_ip AS nlt_user_register_ip, nlt_user.wechat_openid AS nlt_user_wechat_openid, nlt_user.country AS nlt_user_country, nlt_user.province AS nlt_user_province, nlt_user.city AS nlt_user_city, nlt_user.q_limit_day AS nlt_user_q_limit_day, nlt_user.tokens_limit AS nlt_user_tokens_limit, nlt_user.is_active AS nlt_user_is_active +FROM nlt_user +WHERE nlt_user.username = %(username_1)s + LIMIT %(param_1)s] +[parameters: {'username_1': '13917323763', 'param_1': 1}] +(Background on this error at: https://sqlalche.me/e/14/e3q8) + +2023-03-23 10:26:56.423 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 10:26:56.947 | INFO | api.v1.auth.views:auth_login:69 - 用户[13917323763]登录成功 +2023-03-23 10:56:08.175 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 10:56:08.535 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 10:56:13.811 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 10:56:13.812 | INFO | api.v1.auth.views:auth_login:58 - 127.0.0.1 +2023-03-23 10:56:14.274 | INFO | api.v1.auth.views:auth_login:72 - 用户[13917323763]登录成功 +2023-03-23 11:10:51.787 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 11:10:51.976 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 11:10:57.192 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 11:10:57.728 | INFO | api.v1.auth.views:auth_login:70 - 用户[13917323763]登录成功 +2023-03-23 11:56:35.406 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 11:56:35.576 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 11:56:40.287 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 11:56:41.425 | INFO | api.v1.auth.views:auth_login:70 - 用户[13917323763]登录成功 +2023-03-23 11:57:16.628 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 
Safari/537.36 +IP:127.0.0.1 +2023-03-23 11:57:16.802 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '25', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connection.py", line 174, in _new_conn + conn = connection.create_connection( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/util/connection.py", line 95, in create_connection + raise err + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/util/connection.py", line 85, in create_connection + sock.connect(sa) +ConnectionRefusedError: [Errno 111] Connection refused + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connectionpool.py", line 700, in urlopen + self._prepare_proxy(conn) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connectionpool.py", line 996, in _prepare_proxy + conn.connect() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connection.py", line 363, in connect + self.sock = conn = self._new_conn() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connection.py", line 186, in _new_conn + raise NewConnectionError( +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/requests/adapters.py", line 489, in send + resp = conn.urlopen( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connectionpool.py", line 815, in urlopen + return self.urlopen( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connectionpool.py", line 815, in urlopen + return self.urlopen( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/connectionpool.py", line 787, in urlopen + retries = retries.increment( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/urllib3/util/retry.py", line 592, in increment + raise MaxRetryError(_pool, url, error or ResponseError(cause)) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='api.openai.com', port=443): Max retries exceeded with url: /v1/chat/completions (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) + +During handling of the above exception, another exception occurred: + 
+Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_requestor.py", line 516, in request_raw + result = _thread_context.session.request( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/requests/sessions.py", line 587, in request + resp = self.send(prep, **send_kwargs) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/requests/sessions.py", line 701, in send + r = adapter.send(request, **kwargs) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/requests/adapters.py", line 559, in send + raise ProxyError(e, request=request) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='api.openai.com', port=443): Max retries exceeded with url: /v1/chat/completions (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) + +The above exception was the direct cause of the following exception: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", 
line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 56, in ask + answer = chat(chat_info.question) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/ctrl/chat.py", line 5, in chat + completion = openai.ChatCompletion.create( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_resources/chat_completion.py", line 25, in create + return super().create(*args, **kwargs) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 153, in create + response, _, api_key = requestor.request( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_requestor.py", line 216, in request + result = self.request_raw( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_requestor.py", line 528, in request_raw + raise error.APIConnectionError( +openai.error.APIConnectionError: Error communicating with OpenAI: HTTPSConnectionPool(host='api.openai.com', port=443): Max retries exceeded with url: /v1/chat/completions (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) + +2023-03-23 11:57:40.875 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 11:57:43.645 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '25', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 
1156, in read + first_packet = self.connection._read_packet() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +pymysql.err.OperationalError: (1525, "Incorrect DATE value: 'DATE'") + +The above exception was the direct cause of the following exception: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 58, in ask + crud_chat.update_chat_count(db, user = current_user) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/crud/chat.py", line 24, in update_chat_count + chat_count_day = 
db.query(ChatCountDay).filter(and_(ChatCountDay.user_id == user.user_id, ChatCountDay.q_time == q_date)).first() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/query.py", line 2819, in first + return self.limit(1)._iter().first() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/query.py", line 2903, in _iter + result = self.session.execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 1696, in execute + result = conn._execute_20(statement, params or {}, execution_options) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1631, in _execute_20 + return meth(self, args_10style, kwargs_10style, execution_options) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/sql/elements.py", line 325, in _execute_on_connection + return connection._execute_clauseelement( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1498, in _execute_clauseelement + ret = self._execute_context( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1862, in _execute_context + self._handle_dbapi_exception( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 2043, in _handle_dbapi_exception + util.raise_( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/compat.py", line 207, in raise_ + raise exception + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 1156, in read + first_packet = self.connection._read_packet() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +sqlalchemy.exc.OperationalError: (pymysql.err.OperationalError) (1525, "Incorrect DATE value: 'DATE'") +[SQL: SELECT nlt_chat_count_day.id AS nlt_chat_count_day_id, nlt_chat_count_day.create_time AS nlt_chat_count_day_create_time, nlt_chat_count_day.update_time AS nlt_chat_count_day_update_time, nlt_chat_count_day.is_delete AS 
nlt_chat_count_day_is_delete, nlt_chat_count_day.user_id AS nlt_chat_count_day_user_id, nlt_chat_count_day.q_time AS nlt_chat_count_day_q_time, nlt_chat_count_day.q_times AS nlt_chat_count_day_q_times +FROM nlt_chat_count_day +WHERE nlt_chat_count_day.user_id = %(user_id_1)s AND nlt_chat_count_day.q_time = %(q_time_1)s + LIMIT %(param_1)s] +[parameters: {'user_id_1': '899a7ed3a88f46aaacd973ef20166ced', 'q_time_1': Date(), 'param_1': 1}] +(Background on this error at: https://sqlalche.me/e/14/e3q8) + diff --git a/app/logs/2023-03-23.log b/app/logs/2023-03-23.log new file mode 100644 index 0000000..fde7f4b --- /dev/null +++ b/app/logs/2023-03-23.log @@ -0,0 +1,904 @@ +2023-03-23 12:16:52.924 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:16:55.764 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '25', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 1156, in read + first_packet = self.connection._read_packet() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +pymysql.err.OperationalError: (1525, "Incorrect DATE value: 'DATE'") + +The above exception was the direct 
cause of the following exception: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 58, in ask + crud_chat.update_chat_count(db, user = current_user) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/crud/chat.py", line 24, in update_chat_count + chat_count_day = db.query(ChatCountDay).filter(and_(ChatCountDay.user_id == user.user_id, ChatCountDay.q_time == q_date)).first() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/query.py", line 2819, in first + return self.limit(1)._iter().first() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/query.py", line 2903, in _iter + result = self.session.execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/orm/session.py", line 1696, in execute + result = conn._execute_20(statement, params or {}, execution_options) + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1631, in _execute_20 + return meth(self, args_10style, kwargs_10style, execution_options) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/sql/elements.py", line 325, in _execute_on_connection + return connection._execute_clauseelement( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1498, in _execute_clauseelement + ret = self._execute_context( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1862, in _execute_context + self._handle_dbapi_exception( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 2043, in _handle_dbapi_exception + util.raise_( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/util/compat.py", line 207, in raise_ + raise exception + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/base.py", line 1819, in _execute_context + self.dialect.do_execute( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/sqlalchemy/engine/default.py", line 732, in do_execute + cursor.execute(statement, parameters) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 148, in execute + result = self._query(query) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/cursors.py", line 310, in _query + conn.query(q) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 548, in query + self._affected_rows = self._read_query_result(unbuffered=unbuffered) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 775, in _read_query_result + result.read() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 1156, in read + first_packet = self.connection._read_packet() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/connections.py", line 725, in _read_packet + packet.raise_for_error() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/protocol.py", line 221, in raise_for_error + err.raise_mysql_exception(self._data) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/pymysql/err.py", line 143, in raise_mysql_exception + raise errorclass(errno, errval) +sqlalchemy.exc.OperationalError: (pymysql.err.OperationalError) (1525, "Incorrect DATE value: 'DATE'") +[SQL: SELECT nlt_chat_count_day.id AS nlt_chat_count_day_id, nlt_chat_count_day.create_time AS nlt_chat_count_day_create_time, nlt_chat_count_day.update_time AS nlt_chat_count_day_update_time, nlt_chat_count_day.is_delete AS nlt_chat_count_day_is_delete, nlt_chat_count_day.user_id AS nlt_chat_count_day_user_id, nlt_chat_count_day.q_time AS nlt_chat_count_day_q_time, nlt_chat_count_day.q_times AS nlt_chat_count_day_q_times +FROM nlt_chat_count_day +WHERE nlt_chat_count_day.user_id = %(user_id_1)s AND nlt_chat_count_day.q_time = %(q_time_1)s + LIMIT %(param_1)s] +[parameters: {'user_id_1': '899a7ed3a88f46aaacd973ef20166ced', 'q_time_1': Date(), 'param_1': 1}] +(Background on this error at: https://sqlalche.me/e/14/e3q8) + +2023-03-23 12:26:44.878 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux 
x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:46:06.681 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:47:38.758 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:47:39.289 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:48:10.680 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:48:13.346 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '26', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 59, in ask + crud_chat.update_chat_count(db, user = current_user) +TypeError: CRUDChat.update_chat_count() takes 1 positional argument but 2 positional arguments (and 1 keyword-only argument) were given + +2023-03-23 12:49:32.139 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:49:35.845 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '26', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return 
self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 59, in ask + crud_chat.update_chat_count(db, user = current_user) +TypeError: CRUDChat.update_chat_count() got multiple values for argument 'user' + +2023-03-23 12:51:58.105 | INFO | 
api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:52:00.557 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '26', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 59, in ask + crud_chat.update_chat_count(db, current_user) +TypeError: CRUDChat.update_chat_count() takes 2 positional arguments but 3 were given + +2023-03-23 12:52:34.618 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:52:38.269 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '26', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 59, in ask + crud_chat.update_chat_count(db, current_user) +TypeError: CRUDChat.update_chat_count() takes 1 positional argument but 3 were given + +2023-03-23 12:52:53.067 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:53:21.738 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:54:03.396 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:54:03.645 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 
+2023-03-23 12:54:17.562 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:54:19.977 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '26', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 59, in ask + crud_chat.update_chat_count(db, current_user) +TypeError: CRUDChat.update_chat_count() takes 2 positional arguments but 3 were given + +2023-03-23 12:55:22.340 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:56:51.467 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:56:51.672 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:57:00.073 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:57:03.113 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '26', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the 
above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 59, in ask + crud_chat.update_chat_count(db=db, user=current_user) +TypeError: CRUDChat.update_chat_count() got multiple values for argument 'db' + +2023-03-23 12:57:24.327 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 
12:57:27.473 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '26', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await 
self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 59, in ask + crud_chat.update_chat_count(db, user=current_user) +TypeError: CRUDChat.update_chat_count() got multiple values for argument 'user' + +2023-03-23 12:57:40.478 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 12:57:44.591 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '26', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await 
call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 59, in ask + crud_chat.update_chat_count(db, current_user) +TypeError: CRUDChat.update_chat_count() takes 2 positional arguments but 3 were given + +2023-03-23 12:59:23.358 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:11:52.024 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:11:52.480 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '44', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 
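
The two TypeErrors above come from the same call site, app/api/v1/chat/views.py line 59, and together they pin down the mismatch: with a method defined as `def update_chat_count(self, user)`, the call `update_chat_count(db, user=current_user)` binds `db` to `user` positionally and then collides with the keyword ("got multiple values"), while `update_chat_count(db, current_user)` is simply one positional argument too many. The fix is an explicit `db` parameter. A minimal sketch inferred from the tracebacks (crud/chat.py itself is not part of this hunk, so the body is assumed):

```python
# Sketch: signature inferred from the two failures above; the method needs its
# own db parameter so the Session stops colliding with user. Body is assumed.
from sqlalchemy.orm import Session

class CRUDChat:
    def update_chat_count(self, db: Session, user) -> None:
        # bump today's ChatCountDay row for this user, creating it on first ask
        ...

crud_chat = CRUDChat()
# matching call in views.py:
# crud_chat.update_chat_count(db, user=current_user)
```
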
'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", 
line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 58, in ask + q_times = crud_chat.get_chat_count() +TypeError: CRUDChat.get_chat_count() missing 2 required positional arguments: 'db' and 'user' + +2023-03-23 13:12:36.709 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:12:36.954 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '44', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyMzUwMDEsInN1YiI6IjIifQ.7QDklT73W35-3Sy-1ZdM1aGFiY4644NQ9q7k_PjdmwQ', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File 
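
The third failure is the mirror image of the first two: views.py line 58 calls `crud_chat.get_chat_count()` with no arguments even though the method wants `(db, user)`. And once those are supplied, the very next error just below (views.py line 61) shows the quota check evaluating `q_limit_day > 0` while `q_limit_day` is None, most likely a NULL per-user limit coming back from the database. A sketch covering both, inferred from the tracebacks rather than taken from crud/chat.py:

```python
# Sketch: explicit (db, user) arguments, a 0 default instead of None when no
# ChatCountDay row exists yet, and a limit check that tolerates NULL.
# Inferred from the tracebacks; crud/chat.py itself is not shown in this diff.
from datetime import date
from sqlalchemy.orm import Session

from models.chat import ChatCountDay  # model defined further down in this commit

class CRUDChat:
    def get_chat_count(self, db: Session, user) -> int:
        row = (
            db.query(ChatCountDay)
            .filter(ChatCountDay.user_id == user.user_id,
                    ChatCountDay.q_time == date.today())
            .first()
        )
        return row.q_times if row else 0  # never None, so comparisons are safe

# in the view:
# q_times = crud_chat.get_chat_count(db, user=current_user)
# q_limit_day = current_user.q_limit_day or 0  # coalesce NULL; 0 treated as unlimited
# if (q_limit_day > 0 and q_times >= q_limit_day) or q_limit_day < 0:
#     ...  # daily quota exhausted or account explicitly blocked
```
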
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 61, in ask + if ((q_limit_day > 0 and q_times >= q_limit_day) or q_limit_day < 0): +TypeError: '>' not supported between instances of 'NoneType' and 'int' + +2023-03-23 13:14:20.494 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:16:00.033 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:16:26.872 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:16:35.009 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:22:02.077 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:22:02.697 | INFO | api.v1.auth.views:auth_login:73 - 用户[13917323763]登录成功 +2023-03-23 13:22:13.849 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:22:39.228 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:22:39.797 | INFO | api.v1.auth.views:auth_login:73 - 用户[13916742981]登录成功 +2023-03-23 13:22:57.879 | INFO | api:logger_request:140 - 访问记录:POST 
url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:24:50.740 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:24:51.307 | INFO | api.v1.auth.views:auth_login:73 - 用户[13917323763]登录成功 +2023-03-23 13:25:03.582 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:31:00.457 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:31:01.100 | INFO | api.v1.auth.views:auth_login:73 - 用户[13917323763]登录成功 +2023-03-23 13:31:19.751 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:31:52.733 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 13:31:53.259 | INFO | api.v1.auth.views:auth_login:73 - 用户[13916742981]登录成功 +2023-03-23 13:32:13.247 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 14:22:14.146 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 14:25:39.347 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 14:34:06.108 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 14:34:38.505 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 14:34:39.704 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '48', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAyNDA3MTMsInN1YiI6IjUifQ.XN2aGfqBllHuSaUfzwZJ-ZDHKsQ5wQ0yjr6wwNTNn2I', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 
'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", 
line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 77, in ask + answer = chat(key, chat_info.question, tokens_limit) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/ctrl/chat.py", line 5, in chat + completion = openai.ChatCompletion.create( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_resources/chat_completion.py", line 25, in create + return super().create(*args, **kwargs) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 153, in create + response, _, api_key = requestor.request( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_requestor.py", line 226, in request + resp, got_stream = self._interpret_response(result, stream) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_requestor.py", line 619, in _interpret_response + self._interpret_response_line( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/openai/api_requestor.py", line 682, in _interpret_response_line + raise self.handle_error_response( +openai.error.InvalidRequestError: This model's maximum context length is 4097 tokens. However, you requested 10020 tokens (20 in the messages, 10000 in the completion). Please reduce the length of the messages or completion. + +2023-03-23 14:37:02.244 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 14:39:50.021 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 14:39:50.427 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 14:40:10.634 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-23 14:42:57.728 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 diff --git a/app/logs/2023-03-24.2023-03-24_11-20-57_925099.log b/app/logs/2023-03-24.2023-03-24_11-20-57_925099.log new file mode 100644 index 0000000..9aa08cf --- /dev/null +++ b/app/logs/2023-03-24.2023-03-24_11-20-57_925099.log @@ -0,0 +1,77 @@ +2023-03-24 11:21:33.249 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 11:21:36.393 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 11:21:50.795 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) 
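
This InvalidRequestError is pure arithmetic: the request asked for 20 prompt tokens plus a 10000-token completion against a 4097-token context window, because `tokens_limit` (10000 for this user) was passed straight through as `max_tokens`. The completion budget has to be clamped to whatever the prompt leaves free. A sketch using tiktoken for the prompt count; the 4097 window matches gpt-3.5-turbo, which is assumed here:

```python
# Sketch: cap max_tokens so prompt + completion fits the context window the
# error message reports. tiktoken is not in this repo's diff; it is assumed.
import tiktoken

CONTEXT_WINDOW = 4097  # limit reported in the traceback
RESERVED = 16          # small margin for chat message framing

def completion_budget(question: str, tokens_limit: int,
                      model: str = "gpt-3.5-turbo") -> int:
    enc = tiktoken.encoding_for_model(model)
    prompt_tokens = len(enc.encode(question))
    available = CONTEXT_WINDOW - prompt_tokens - RESERVED
    # callers may prefer to reject outright when available <= 0
    return max(1, min(tokens_limit, available))
```
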
Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 11:21:51.682 | INFO | api.v1.auth.views:auth_login:73 - 用户[13917323763]登录成功 +2023-03-24 11:22:08.783 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 11:22:09.123 | ERROR | api:all_exception_handler:100 - 全局异常 +URL:http://127.0.0.1:8010/api/v1/chat/ask +Headers:Headers({'host': '127.0.0.1:8010', 'connection': 'keep-alive', 'content-length': '26', 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"', 'accept': 'application/json', 'content-type': 'application/json', 'sec-ch-ua-mobile': '?0', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36', 'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2ODAzMTkzMTEsInN1YiI6IjIifQ.Oau8jQLeXTZNd7SImtQkQztdhKjpOn6CBbHbfcpu8fE', 'sec-ch-ua-platform': '"Linux"', 'origin': 'http://127.0.0.1:8010', 'sec-fetch-site': 'same-origin', 'sec-fetch-mode': 'cors', 'sec-fetch-dest': 'empty', 'referer': 'http://127.0.0.1:8010/api/v1/docs', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8'}) +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 81, in receive + return self.receive_nowait() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 76, in receive_nowait + raise WouldBlock +anyio.WouldBlock + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 41, in call_next + message = await recv_stream.receive() + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/anyio/streams/memory.py", line 101, in receive + raise EndOfStream +anyio.EndOfStream + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 159, in __call__ + await self.app(scope, receive, _send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 63, in __call__ + response = await self.dispatch_func(request, call_next) + File "/home/leo/Work/openai/JmedChat/app/api/__init__.py", line 143, in logger_request + response = await call_next(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 44, in call_next + raise app_exc + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/base.py", line 34, in coro + await self.app(scope, request.receive, send_stream.send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 92, in __call__ + await self.simple_response(scope, receive, send, request_headers=headers) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 147, in simple_response + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 82, in __call__ + raise exc + File 
"/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/exceptions.py", line 71, in __call__ + await self.app(scope, receive, sender) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ + raise e + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 656, in __call__ + await route.handle(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 259, in handle + await self.app(scope, receive, send) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/starlette/routing.py", line 61, in app + response = await func(request) + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 227, in app + raw_response = await run_endpoint_function( + File "/home/leo/Work/openai/JmedChat/venv/lib/python3.10/site-packages/fastapi/routing.py", line 160, in run_endpoint_function + return await dependant.call(**values) + File "/home/leo/Work/openai/JmedChat/app/api/v1/chat/views.py", line 79, in ask + answer = ctrl_chat.chat(key, chat_info.question, tokens_limit) +TypeError: CTRLChat.chat() takes 3 positional arguments but 4 were given + +2023-03-24 11:22:28.858 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 11:22:42.693 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 diff --git a/app/logs/2023-03-24.log b/app/logs/2023-03-24.log new file mode 100644 index 0000000..0fe01a3 --- /dev/null +++ b/app/logs/2023-03-24.log @@ -0,0 +1,184 @@ +2023-03-24 12:53:52.918 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 12:53:53.438 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 12:54:08.121 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/views/Login.html +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 12:54:08.361 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/favicon.ico +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:03:38.152 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/docs +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:03:38.360 | INFO | api:logger_request:140 - 访问记录:GET url:http://127.0.0.1:8010/api/v1/openapi.json +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:11:15.583 | 
INFO | api:logger_request:140 - 访问记录:OPTIONS url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:11:15.591 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/auth/login +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:11:20.246 | INFO | api.v1.auth.views:auth_login:73 - 用户[13917323763]登录成功 +2023-03-24 13:11:27.111 | INFO | api:logger_request:140 - 访问记录:OPTIONS url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:11:27.121 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:11:50.724 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:09.163 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:12.053 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:12.054 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:12.054 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:12.055 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:23.149 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:23.435 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:23.503 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:23.816 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:23.991 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, 
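
Looking back at the last traceback of the previous log file: "CTRLChat.chat() takes 3 positional arguments but 4 were given" means the bound method only accepted `(self, key, question)`, so `tokens_limit` in the call at views.py line 79 had no parameter to land in. A sketch of a matching signature, using the openai 0.x API seen in the earlier chat/ctrl/chat.py traceback; message construction and model name are assumptions:

```python
# Sketch: a signature that matches the failing call in views.py line 79.
# The ChatCompletion call mirrors the earlier traceback through
# api/v1/chat/ctrl/chat.py; messages and model are assumed, not repo code.
import openai

class CTRLChat:
    def chat(self, key: str, question: str, tokens_limit: int) -> str:
        completion = openai.ChatCompletion.create(
            api_key=key,
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": question}],
            max_tokens=tokens_limit,
        )
        return completion.choices[0].message.content

ctrl_chat = CTRLChat()
# views.py: answer = ctrl_chat.chat(key, chat_info.question, tokens_limit)
```
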
like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:24.212 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:24.361 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:24.480 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:24.611 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:24.924 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:24.964 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:25.331 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:25.560 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:25.781 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:28.620 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:28.803 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:29.028 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:29.162 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:29.419 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:29.509 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:29.736 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:29.869 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:29.985 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:30.178 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:30.359 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:30.575 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:30.608 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:30.829 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:31.064 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:31.283 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:31.383 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:31.599 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:31.707 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:31.952 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:32.036 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) 
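
One more thing this log makes visible: bursts of identical POSTs to /api/v1/chat/ask arriving milliseconds apart from the same client (the 13:13:12 and 13:13:23-33 runs above), with nothing in front of the endpoint to damp them. A naive per-client cooldown dependency would be one way to take the edge off; this is a sketch under the assumption that an in-process dict is acceptable (a deployment with multiple workers would want Redis or similar):

```python
# Sketch: a naive in-process cooldown to damp request bursts like the ones in
# this log. Per-process state, no cross-worker coordination; everything here
# is an assumption for illustration, not app code.
import time
from fastapi import Request, HTTPException

_last_seen: dict[str, float] = {}
COOLDOWN_SECONDS = 1.0

def throttle(request: Request) -> None:
    """Use as a route dependency; rejects calls arriving inside the cooldown."""
    client = request.client.host
    now = time.monotonic()
    if now - _last_seen.get(client, 0.0) < COOLDOWN_SECONDS:
        raise HTTPException(status_code=429, detail="too many requests")
    _last_seen[client] = now

# wiring: @router.post("/ask", dependencies=[Depends(throttle)])
# keying on user_id instead of IP may fit better once auth is in the picture.
```
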
Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:32.175 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:32.355 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:32.571 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:32.672 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:32.881 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:33.010 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:33.113 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:33.249 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:33.455 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 +2023-03-24 13:13:33.569 | INFO | api:logger_request:140 - 访问记录:POST url:http://127.0.0.1:8010/api/v1/chat/ask +headers:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 +IP:127.0.0.1 diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..1ae1b62 --- /dev/null +++ b/app/main.py @@ -0,0 +1,16 @@ +# !/usr/bin/env python3 +# -*- encoding : utf-8 -*- +# @Filename : main.py +# @Software : VSCode +# @Datetime : 2021/11/03 16:59:04 +# @Author : leo liu +# @Version : 1.0 +# @Description : + +from api import create_app + +app = create_app() + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app='main:app', host="127.0.0.1", port=8010, reload=True, debug=True) diff --git a/app/models/__init__.py b/app/models/__init__.py new file mode 100644 index 0000000..d58ae8e --- /dev/null +++ b/app/models/__init__.py @@ -0,0 +1,18 @@ +# !/usr/bin/env python3 +# -*- encoding : utf-8 -*- +# @Filename : __init__.py +# @Software : VSCode +# @Datetime : 2021/11/04 21:30:46 +# @Author : leo liu +# @Version : 1.0 +# @Description : + +from db.base_class import Base +from db.session import engine + +from .auth import User, Role, user_role, LoginHistory +from .menu import Menu, role_menu +from .chat import ChatHistory, ChatCountDay +from .settings import Settings + +Base.metadata.create_all(engine) \ No newline at 
end of file diff --git a/app/models/__pycache__/__init__.cpython-310.pyc b/app/models/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..af12f6d Binary files /dev/null and b/app/models/__pycache__/__init__.cpython-310.pyc differ diff --git a/app/models/__pycache__/__init__.cpython-39.pyc b/app/models/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..9c7a0f2 Binary files /dev/null and b/app/models/__pycache__/__init__.cpython-39.pyc differ diff --git a/app/models/__pycache__/auth.cpython-310.pyc b/app/models/__pycache__/auth.cpython-310.pyc new file mode 100644 index 0000000..6e69e5f Binary files /dev/null and b/app/models/__pycache__/auth.cpython-310.pyc differ diff --git a/app/models/__pycache__/auth.cpython-39.pyc b/app/models/__pycache__/auth.cpython-39.pyc new file mode 100644 index 0000000..f8371d5 Binary files /dev/null and b/app/models/__pycache__/auth.cpython-39.pyc differ diff --git a/app/models/__pycache__/chat.cpython-310.pyc b/app/models/__pycache__/chat.cpython-310.pyc new file mode 100644 index 0000000..9118fb1 Binary files /dev/null and b/app/models/__pycache__/chat.cpython-310.pyc differ diff --git a/app/models/__pycache__/menu.cpython-310.pyc b/app/models/__pycache__/menu.cpython-310.pyc new file mode 100644 index 0000000..1e8c9b4 Binary files /dev/null and b/app/models/__pycache__/menu.cpython-310.pyc differ diff --git a/app/models/__pycache__/menu.cpython-39.pyc b/app/models/__pycache__/menu.cpython-39.pyc new file mode 100644 index 0000000..36d7ff2 Binary files /dev/null and b/app/models/__pycache__/menu.cpython-39.pyc differ diff --git a/app/models/__pycache__/settings.cpython-310.pyc b/app/models/__pycache__/settings.cpython-310.pyc new file mode 100644 index 0000000..d32c66f Binary files /dev/null and b/app/models/__pycache__/settings.cpython-310.pyc differ diff --git a/app/models/auth.py b/app/models/auth.py new file mode 100644 index 0000000..9aaf884 --- /dev/null +++ b/app/models/auth.py @@ -0,0 +1,79 @@ +# !/usr/bin/env python3 +# -*- encoding : utf-8 -*- +# @Filename : user.py +# @Software : VSCode +# @Datetime : 2021/11/04 21:33:10 +# @Author : leo liu +# @Version : 1.0 +# @Description : + +""" +用户模型 +""" + +from datetime import datetime +from sqlalchemy import Column, Integer, VARCHAR, SmallInteger, DateTime, ForeignKey, Table +from sqlalchemy.orm import relationship +from sqlalchemy.sql import func + +from db.base_class import Base, gen_uuid + +user_role = Table( + "nlt_userrole", + Base.metadata, + Column("id", Integer, unique=True, index=True, primary_key=True, autoincrement=True, comment="ID"), + Column("user_id", VARCHAR(32), ForeignKey("nlt_user.user_id"), nullable=False, primary_key=True, comment="用户id"), + Column("role_id", VARCHAR(32), ForeignKey("nlt_role.role_id"), nullable=False, primary_key=True, comment="角色id"), + Column("create_time", DateTime, default=datetime.now, server_default=func.now(), comment="创建时间") +) + +class User(Base): + """ + 用户表 + """ + __tablename__ = "nlt_user" + user_id = Column(VARCHAR(32), default=gen_uuid, index=True, primary_key=True, unique=True, comment="用户id") + username = Column(VARCHAR(128), nullable=False, unique=True, comment="用户名(不可更改)") + hashed_password = Column(VARCHAR(128), nullable=False, comment="密码") + nickname = Column(VARCHAR(128), comment="用户昵称(显示用可更改)") + email = Column(VARCHAR(128), unique=True, comment="邮箱") + avatar = Column(VARCHAR(256), nullable=True, comment="用户头像") + phone = Column(VARCHAR(16), unique=True, index=True, nullable=True, comment="手机号") + 
gender = Column(SmallInteger, default=0, comment="性别 0=未知 1=男 2=女", server_default="0") + register_time = Column(DateTime, default=datetime.now, comment="注册时间") + last_login_time = Column(DateTime, default=datetime.now, comment="上次登录时间") + last_login_ip = Column(VARCHAR(64), nullable=True, comment="上次登录IP") + register_ip = Column(VARCHAR(64), nullable=True, comment="注册IP") + wechat_openid = Column(VARCHAR(64), nullable=True, comment="微信openId") + country = Column(VARCHAR(64), nullable=True, comment="国家") + province = Column(VARCHAR(64), nullable=True, comment="省") + city = Column(VARCHAR(64), nullable=True, comment="市") + q_limit_day = Column(Integer, default=0, comment="每日问题数量限制") + tokens_limit = Column(Integer, default=0, comment="字数限制") + # role_id = Column(Integer, comment="角色") + # role = relationship("Role", secondary=user_role) + is_active = Column(Integer, default=True, comment="用户是否激活 0=未激活 1=激活", server_default="1") + __table_args__ = ({'comment': '用户表'}) + +class Role(Base): + """ + 角色表 + """ + __tablename__ = "nlt_role" + role_id = Column(VARCHAR(32), default=gen_uuid, index=True, primary_key=True, unique=True, comment="角色id") + name = Column(VARCHAR(128), nullable=False, comment="角色名") + title = Column(VARCHAR(128), comment="角色名(中文)") + users = relationship("User", backref="roles", secondary=user_role) + # menu = relationship("Menu", secondary=menu.role_menu) + __table_args__ = ({'comment': '角色表'}) + +class LoginHistory(Base): + """ + 登录记录 + """ + __tablename__ = "nlt_login_history" + user_id = Column(VARCHAR(32), ForeignKey('nlt_user.user_id')) + username = Column(VARCHAR(128), comment="用户名") + login_time = Column(DateTime, default=datetime.now, comment="登录时间") + login_ipv4 = Column(VARCHAR(15), comment="登录ip") + user = relationship("User", backref="login_histories") \ No newline at end of file diff --git a/app/models/chat.py b/app/models/chat.py new file mode 100644 index 0000000..a80802a --- /dev/null +++ b/app/models/chat.py @@ -0,0 +1,40 @@ +# !/usr/bin/env python3 +# -*- encoding : utf-8 -*- +# @Filename : chat.py +# @Software : VSCode +# @Datetime : 2023/03/20 21:46:54 +# @Author : leo liu +# @Version : 1.0 +# @Description : + +""" +聊天模型 +""" + +from datetime import datetime +from sqlalchemy import Column, Integer, Text, Date, DateTime, ForeignKey, VARCHAR +from sqlalchemy.orm import relationship + +from db.base_class import Base + +class ChatHistory(Base): + """ + 聊天记录 + """ + __tablename__ = "nlt_chat_history" + user_id = Column(VARCHAR(32), ForeignKey('nlt_user.user_id')) + username = Column(VARCHAR(128), comment="用户名") + q_content = Column(Text, nullable=False, comment="提问内容") + a_content = Column(Text, nullable=False, comment="回答内容") + q_time = Column(DateTime, default=datetime.now, comment="提问时间") + user = relationship("User", backref="chat_history") + +class ChatCountDay(Base): + """ + 日聊天计数 + """ + __tablename__ = "nlt_chat_count_day" + user_id = Column(VARCHAR(32), ForeignKey('nlt_user.user_id')) + q_time = Column(Date, default=datetime.now, comment="聊天计数日期") + q_times = Column(Integer, default=0, comment="聊天计数") + user = relationship("User", backref="chat_count_day") \ No newline at end of file diff --git a/app/models/menu.py b/app/models/menu.py new file mode 100644 index 0000000..bc62500 --- /dev/null +++ b/app/models/menu.py @@ -0,0 +1,40 @@ +# !/usr/bin/env python3 +# -*- encoding : utf-8 -*- +# @Filename : menu.py +# @Software : VSCode +# @Datetime : 2021/11/08 21:50:31 +# @Author : leo liu +# @Version : 1.0 +# @Description : 前端菜单 + +""" +菜单模型 +""" + 
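
auth.py above and the menu.py file beginning here wire User <-> Role and Role <-> Menu as many-to-many relations through the nlt_userrole and nlt_rolemenu association tables, with the backrefs `roles` and `menus` hung off the secondary relationships. Traversal then looks like this (a sketch; SessionLocal and the import style follow the test scripts later in this commit, so treat the exact paths as assumptions):

```python
# Sketch: traversing the User -> Role -> Menu many-to-many chain defined by
# the user_role / role_menu association tables in this commit.
from db.session import SessionLocal
from models.auth import User

db = SessionLocal()
user = db.query(User).filter(User.username == "admin").first()
if user:
    for role in user.roles:        # backref from Role.users
        for menu in role.menus:    # backref from Menu.role
            print(role.name, menu.path)
db.close()
```
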
+from datetime import datetime +from sqlalchemy import Column, Integer, VARCHAR, ForeignKey, Table, DateTime +from sqlalchemy.orm import relationship +from sqlalchemy.sql import func + +from db.base_class import Base, gen_uuid + +role_menu = Table( + "nlt_rolemenu", + Base.metadata, + Column("id", Integer, unique=True, index=True, primary_key=True, autoincrement=True, comment="ID"), + Column("role_id", VARCHAR(32), ForeignKey("nlt_role.role_id"), nullable=False, primary_key=True, comment="角色id"), + Column("menu_id", VARCHAR(32), ForeignKey("nlt_menu.menu_id"), nullable=False, primary_key=True, comment="菜单id"), + Column("create_time", DateTime, default=datetime.now, server_default=func.now(), comment="创建时间") +) + +class Menu(Base): + """ + 菜单表 + """ + __tablename__ = "nlt_menu" + menu_id = Column(VARCHAR(32), default=gen_uuid, index=True, primary_key=True, unique=True, comment="菜单id") + path = Column(VARCHAR(64), unique=True, nullable=False, comment="菜单地址") + name = Column(VARCHAR(32), unique=True, nullable=False, comment="菜单名称") + super_menu = Column(VARCHAR(64), nullable=True, comment="上级菜单id") + role = relationship("Role", backref="menus", secondary=role_menu) + __table_args__ = ({'comment': '菜单表'}) \ No newline at end of file diff --git a/app/models/settings.py b/app/models/settings.py new file mode 100644 index 0000000..37592bc --- /dev/null +++ b/app/models/settings.py @@ -0,0 +1,24 @@ +# !/usr/bin/env python3 +# -*- encoding : utf-8 -*- +# @Filename : settings.py +# @Software : VSCode +# @Datetime : 2023/03/23 13:55:46 +# @Author : leo liu +# @Version : 1.0 +# @Description : + +''' +系统配置模型 +''' + +from db.base_class import Base +from sqlalchemy import Column, VARCHAR + +class Settings(Base): + """ + 系统配置表 + """ + __tablename__ = "nlt_settings" + key = Column(VARCHAR(32), nullable=False, comment="配置类型") + value = Column(VARCHAR(64), nullable=False, comment="配置值") + __table_args__ = ({'comment': '系统配置表'}) diff --git a/app/tests/__init__.py b/app/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/tests/get_user.py b/app/tests/get_user.py new file mode 100644 index 0000000..5bfb4f1 --- /dev/null +++ b/app/tests/get_user.py @@ -0,0 +1,16 @@ +import sys +sys.path.append("..") + +from db.session import SessionLocal +from api.v1.auth.crud.user import curd_user + +def main() -> None: + + db = SessionLocal() + + user = curd_user.get_by_username(db, username="admin") + print(f"获取用户{user.nickname}成功") + # print("123") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/app/tests/init_data.py b/app/tests/init_data.py new file mode 100644 index 0000000..8d37836 --- /dev/null +++ b/app/tests/init_data.py @@ -0,0 +1,116 @@ +import sys +sys.path.append("..") + +from db.session import SessionLocal, engine +from db.base_class import Base +from models.auth import Role, User +from models.menu import Menu +from api.v1.auth.crud.user import crud_user + +# from core.security import get_password_hash + +def main() -> None: + + Base.metadata.create_all(engine) + + db = SessionLocal() + # create_user(db) + create_menu(db) + # create_auth_data(db) + + # get_userinfo(db) + +def create_auth_data(db): + user_in_1 = User( + username="admin", + # hashed_password=get_password_hash('1qazxsw2'), + nickname="Leo Liu", + email="10200039@qq.com", + avatar="http://www.koelndom.top:13060/public/upload/642/5f81bd14eebb2e000d000002/images/logo/6115abbb55b35ac4c047a00b1272d475.jpg" + ) + + user_in_2 = User( + username="novelliou", + # 
hashed_password=get_password_hash('1qazxsw2'), + nickname="Leo", + email="leo@koelndom.cn", + avatar="http://www.koelndom.top:13060/public/upload/642/5f81bd14eebb2e000d000002/images/logo/6115abbb55b35ac4c047a00b1272d475.jpg" + ) + + role_in_1 = Role( + name="admin", + title="管理员" + ) + + role_in_2 = Role( + name="user", + title="用户" + ) + + menu_in_1 = Menu( + path = "/system", + name = "System" + ) + + menu_in_2 = Menu( + path = "/system/auth", + name = "Auth", + super_menu = "/system" + ) + + menu_in_3 = Menu( + path = "/system/auth/user", + name = "User", + super_menu = "/system/auth" + ) + + menu_in_4 = Menu( + path = "/system/auth/role", + name = "Role", + super_menu = "/system/auth" + ) + + menu_in_5 = Menu( + path = "/system/logs", + name = "Logs", + super_menu = "/system" + ) + + """ role_in_1.menus = [menu_in_1, menu_in_2, menu_in_3, menu_in_4, menu_in_5, menu_in_6, menu_in_7] + role_in_2.menus = [menu_in_1, menu_in_2, menu_in_3, menu_in_4, menu_in_5, menu_in_7] + + user_in_1.roles = [role_in_1] + user_in_2.roles = [role_in_2] """ + + role_in_1.menus.append(menu_in_1) + role_in_1.menus.append(menu_in_2) + role_in_1.menus.append(menu_in_3) + role_in_1.menus.append(menu_in_4) + role_in_1.menus.append(menu_in_5) + + user_in_1.roles.append(role_in_1) + user_in_2.roles.append(role_in_2) + + db.add(user_in_1) + db.add(user_in_2) + db.commit() + db.refresh(user_in_1) + +def get_userinfo(db): + user = crud_user.get_by_username(db, username="novelliou") + + for menu in user.roles[0].menus: + print(menu.path) + +def create_menu(db): + menu_in = Menu( + path = "/system/auth/menu", + name = "Menu", + super_menu = "/system/auth" + ) + + db.add(menu_in) + db.commit() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/app/tests/login.py b/app/tests/login.py new file mode 100644 index 0000000..f1853fe --- /dev/null +++ b/app/tests/login.py @@ -0,0 +1,18 @@ +import sys +sys.path.append("..") + +from db.session import SessionLocal +from api.v1.auth.crud.user import curd_user + +def main() -> None: + + db = SessionLocal() + + user = curd_user.authenticate(db, username="admin", password="1qazxsw2") + if (user): + print(f"用户{user.nickname}登录成功") + else: + print(f"登录失败") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/app/utils/__init__.py b/app/utils/__init__.py new file mode 100644 index 0000000..1831415 --- /dev/null +++ b/app/utils/__init__.py @@ -0,0 +1,14 @@ +# !/usr/bin/env python3 +# -*- encoding : utf-8 -*- +# @Filename : __init__.py +# @Software : VSCode +# @Datetime : 2021/11/03 17:15:36 +# @Author : leo liu +# @Version : 1.0 +# @Description : + + +""" + +""" + diff --git a/app/utils/__pycache__/__init__.cpython-310.pyc b/app/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..b1cd66f Binary files /dev/null and b/app/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/app/utils/__pycache__/__init__.cpython-39.pyc b/app/utils/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..70b8b49 Binary files /dev/null and b/app/utils/__pycache__/__init__.cpython-39.pyc differ diff --git a/app/utils/__pycache__/custom_exc.cpython-310.pyc b/app/utils/__pycache__/custom_exc.cpython-310.pyc new file mode 100644 index 0000000..baf5d75 Binary files /dev/null and b/app/utils/__pycache__/custom_exc.cpython-310.pyc differ diff --git a/app/utils/__pycache__/custom_exc.cpython-39.pyc b/app/utils/__pycache__/custom_exc.cpython-39.pyc new file mode 100644 index 0000000..201a845 Binary files 
diff --git a/app/utils/messages.py b/app/utils/messages.py
new file mode 100644
index 0000000..81a6080
--- /dev/null
+++ b/app/utils/messages.py
@@ -0,0 +1,19 @@
+# !/usr/bin/env python3
+# -*- encoding : utf-8 -*-
+# @Filename : messages.py
+# @Software : VSCode
+# @Datetime : 2021/11/04 21:14:47
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+class MessagesCn:
+    # User-related messages (user-facing strings, intentionally kept in Chinese)
+    MSG_WRONG_USERINFO: str = "用户名或者密码错误"    # wrong username or password
+    MSG_WRONG_POST_PARAMS: str = "POST请求参数错误"   # bad POST request parameters
+    MSG_WRONG_TOKEN: str = "token认证失败"            # token authentication failed
+    MSG_USER_INACTIVE: str = "用户未激活"             # user not activated
+    MSG_USER_UNSIGNUP: str = "未找到有效用户"         # no valid user found
+    MSG_USER_LOGIN_SUCCESS: str = "用户登录成功"      # user logged in successfully
+
+msg = MessagesCn()
\ No newline at end of file
diff --git a/app/utils/response_code.py b/app/utils/response_code.py
new file mode 100644
index 0000000..18f9232
--- /dev/null
+++ b/app/utils/response_code.py
@@ -0,0 +1,86 @@
+# !/usr/bin/env python3
+# -*- encoding : utf-8 -*-
+# @Filename : response_code.py
+# @Software : VSCode
+# @Datetime : 2021/11/03 17:16:03
+# @Author : leo liu
+# @Version : 1.0
+# @Description :
+
+"""
+Defines the response statuses returned by the API
+
+# The docs say orjson can squeeze out extra performance
+https://fastapi.tiangolo.com/advanced/custom-response/#use-orjsonresponse
+
+It's possible that ORJSONResponse might be a faster alternative.
+
+# Install
+pip install --upgrade orjson
+
+In testing, it does not serialize some special field types gracefully, e.g. decimals:
+TypeError: Type is not JSON serializable: decimal.Decimal
+"""
+from fastapi import status
+from fastapi.responses import JSONResponse, Response, ORJSONResponse
+
+from typing import Union
+
+def resp_200(data: Union[list, dict, str] = None, *, message: str = "Success") -> dict:
+    return {
+        'code': 200,
+        'message': message,
+        'data': data,
+    }
+
+def resp_403(data: str = None) -> Response:
+    return ORJSONResponse(
+        status_code=status.HTTP_403_FORBIDDEN,
+        content={
+            'code': 403,
+            'message': "Forbidden",
+            'data': data,
+        }
+    )
+
+def resp_404(data: str = None) -> Response:
+    return ORJSONResponse(
+        status_code=status.HTTP_404_NOT_FOUND,
+        content={
+            'code': 404,
+            'message': "Page Not Found",
+            'data': data,
+        }
+    )
+
+def resp_500(data: str = None) -> Response:
+    return ORJSONResponse(
+        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+        content={
+            'code': 500,
+            'message': "Server internal error",
+            'data': data,
+        }
+    )
+
+# Custom business codes
+def resp_5000(data: Union[list, dict, str]) -> Response:
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content={
+            'code': 5000,
+            'message': "Token failure",
+            'data': data,
+        }
+    )
+
+# User creation failed
+def resp_5010(message: str = "Error") -> Response:
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content={
+            'code': 5010,
+            'message': message,
+            'data': None
+        }
+    )
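The `decimal.Decimal` caveat in the docstring above is easy to reproduce, and orjson's `default=` hook is the usual workaround. A small sketch, assuming orjson is installed:

```python
# Reproduce the docstring's caveat: orjson rejects decimal.Decimal by default.
import decimal

import orjson

payload = {"price": decimal.Decimal("9.90")}

try:
    orjson.dumps(payload)
except TypeError as exc:
    print(exc)  # Type is not JSON serializable: decimal.Decimal

# Workaround: let default= convert unsupported types before encoding.
print(orjson.dumps(payload, default=str))  # b'{"price":"9.90"}'
```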
diff --git a/venv/bin/Activate.ps1 b/venv/bin/Activate.ps1
new file mode 100644
index 0000000..b49d77b
--- /dev/null
+++ b/venv/bin/Activate.ps1
@@ -0,0 +1,247 @@
+<#
+.Synopsis
+Activate a Python virtual environment for the current PowerShell session.
+
+.Description
+Pushes the python executable for a virtual environment to the front of the
+$Env:PATH environment variable and sets the prompt to signify that you are
+in a Python virtual environment. Makes use of the command line switches as
+well as the `pyvenv.cfg` file values present in the virtual environment.
+
+.Parameter VenvDir
+Path to the directory that contains the virtual environment to activate. The
+default value for this is the parent of the directory that the Activate.ps1
+script is located within.
+
+.Parameter Prompt
+The prompt prefix to display when this virtual environment is activated. By
+default, this prompt is the name of the virtual environment folder (VenvDir)
+surrounded by parentheses and followed by a single space (ie. '(.venv) ').
+
+.Example
+Activate.ps1
+Activates the Python virtual environment that contains the Activate.ps1 script.
+
+.Example
+Activate.ps1 -Verbose
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and shows extra information about the activation as it executes.
+
+.Example
+Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
+Activates the Python virtual environment located in the specified location.
+
+.Example
+Activate.ps1 -Prompt "MyPython"
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and prefixes the current prompt with the specified string (surrounded in
+parentheses) while the virtual environment is active.
+
+.Notes
+On Windows, it may be required to enable this Activate.ps1 script by setting the
+execution policy for the user.
You can do this by issuing the following PowerShell +command: + +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + +For more information on Execution Policies: +https://go.microsoft.com/fwlink/?LinkID=135170 + +#> +Param( + [Parameter(Mandatory = $false)] + [String] + $VenvDir, + [Parameter(Mandatory = $false)] + [String] + $Prompt +) + +<# Function declarations --------------------------------------------------- #> + +<# +.Synopsis +Remove all shell session elements added by the Activate script, including the +addition of the virtual environment's Python executable from the beginning of +the PATH variable. + +.Parameter NonDestructive +If present, do not remove this function from the global namespace for the +session. + +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove VIRTUAL_ENV_PROMPT altogether. + if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { + Remove-Item -Path env:VIRTUAL_ENV_PROMPT + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. +#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). + $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. 
+ if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. +if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." + $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. 
+$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } + $env:VIRTUAL_ENV_PROMPT = $Prompt +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/venv/bin/activate b/venv/bin/activate new file mode 100644 index 0000000..785b59f --- /dev/null +++ b/venv/bin/activate @@ -0,0 +1,69 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r 2> /dev/null + fi + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + unset VIRTUAL_ENV_PROMPT + if [ ! "${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV="/home/leo/Work/openai/JmedChat/venv" +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + PS1="(venv) ${PS1:-}" + export PS1 + VIRTUAL_ENV_PROMPT="(venv) " + export VIRTUAL_ENV_PROMPT +fi + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r 2> /dev/null +fi diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh new file mode 100644 index 0000000..0b82886 --- /dev/null +++ b/venv/bin/activate.csh @@ -0,0 +1,26 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . 
+# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "/home/leo/Work/openai/JmedChat/venv" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/bin:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + set prompt = "(venv) $prompt" + setenv VIRTUAL_ENV_PROMPT "(venv) " +endif + +alias pydoc python -m pydoc + +rehash diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish new file mode 100644 index 0000000..299dd47 --- /dev/null +++ b/venv/bin/activate.fish @@ -0,0 +1,66 @@ +# This file must be used with "source /bin/activate.fish" *from fish* +# (https://fishshell.com/); you cannot run it directly. + +function deactivate -d "Exit virtual environment and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + set -e VIRTUAL_ENV_PROMPT + if test "$argv[1]" != "nondestructive" + # Self-destruct! + functions -e deactivate + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV "/home/leo/Work/openai/JmedChat/venv" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/bin" $PATH + +# Unset PYTHONHOME if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # Save the current fish_prompt function as the function _old_fish_prompt. + functions -c fish_prompt _old_fish_prompt + + # With the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command. + set -l old_status $status + + # Output the venv prompt; color taken from the blue of the Python logo. + printf "%s%s%s" (set_color 4B8BBE) "(venv) " (set_color normal) + + # Restore the return status of the previous command. + echo "exit $old_status" | . + # Output the original/"old" prompt. 
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" + set -gx VIRTUAL_ENV_PROMPT "(venv) " +end diff --git a/venv/bin/email_validator b/venv/bin/email_validator new file mode 100755 index 0000000..3822fc7 --- /dev/null +++ b/venv/bin/email_validator @@ -0,0 +1,8 @@ +#!/home/leo/Work/openai/JmedChat/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from email_validator import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/normalizer b/venv/bin/normalizer new file mode 100755 index 0000000..4c8593d --- /dev/null +++ b/venv/bin/normalizer @@ -0,0 +1,8 @@ +#!/home/leo/Work/openai/JmedChat/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from charset_normalizer.cli.normalizer import cli_detect +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli_detect()) diff --git a/venv/bin/openai b/venv/bin/openai new file mode 100755 index 0000000..2bbf7be --- /dev/null +++ b/venv/bin/openai @@ -0,0 +1,8 @@ +#!/home/leo/Work/openai/JmedChat/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from openai._openai_scripts import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip b/venv/bin/pip new file mode 100755 index 0000000..089042e --- /dev/null +++ b/venv/bin/pip @@ -0,0 +1,8 @@ +#!/home/leo/Work/openai/JmedChat/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip3 b/venv/bin/pip3 new file mode 100755 index 0000000..089042e --- /dev/null +++ b/venv/bin/pip3 @@ -0,0 +1,8 @@ +#!/home/leo/Work/openai/JmedChat/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip3.10 b/venv/bin/pip3.10 new file mode 100755 index 0000000..089042e --- /dev/null +++ b/venv/bin/pip3.10 @@ -0,0 +1,8 @@ +#!/home/leo/Work/openai/JmedChat/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pyrsa-decrypt b/venv/bin/pyrsa-decrypt new file mode 100755 index 0000000..3b64b95 --- /dev/null +++ b/venv/bin/pyrsa-decrypt @@ -0,0 +1,8 @@ +#!/home/leo/Work/openai/JmedChat/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from rsa.cli import decrypt +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(decrypt()) diff --git a/venv/bin/pyrsa-encrypt b/venv/bin/pyrsa-encrypt new file mode 100755 index 0000000..f8d756d --- /dev/null +++ b/venv/bin/pyrsa-encrypt @@ -0,0 +1,8 @@ +#!/home/leo/Work/openai/JmedChat/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from rsa.cli import encrypt +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(encrypt()) diff --git a/venv/bin/pyrsa-keygen b/venv/bin/pyrsa-keygen new file mode 100755 index 0000000..a481669 --- /dev/null +++ b/venv/bin/pyrsa-keygen @@ -0,0 +1,8 @@ 
+#!/home/leo/Work/openai/JmedChat/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from rsa.cli import keygen
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(keygen())
diff --git a/venv/bin/pyrsa-priv2pub b/venv/bin/pyrsa-priv2pub
new file mode 100755
index 0000000..2027ee5
--- /dev/null
+++ b/venv/bin/pyrsa-priv2pub
@@ -0,0 +1,8 @@
+#!/home/leo/Work/openai/JmedChat/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from rsa.util import private_to_public
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(private_to_public())
diff --git a/venv/bin/pyrsa-sign b/venv/bin/pyrsa-sign
new file mode 100755
index 0000000..1987ffc
--- /dev/null
+++ b/venv/bin/pyrsa-sign
@@ -0,0 +1,8 @@
+#!/home/leo/Work/openai/JmedChat/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from rsa.cli import sign
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(sign())
diff --git a/venv/bin/pyrsa-verify b/venv/bin/pyrsa-verify
new file mode 100755
index 0000000..9175ef9
--- /dev/null
+++ b/venv/bin/pyrsa-verify
@@ -0,0 +1,8 @@
+#!/home/leo/Work/openai/JmedChat/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from rsa.cli import verify
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(verify())
diff --git a/venv/bin/python b/venv/bin/python
new file mode 120000
index 0000000..b8a0adb
--- /dev/null
+++ b/venv/bin/python
@@ -0,0 +1 @@
+python3
\ No newline at end of file
diff --git a/venv/bin/python3 b/venv/bin/python3
new file mode 120000
index 0000000..ae65fda
--- /dev/null
+++ b/venv/bin/python3
@@ -0,0 +1 @@
+/usr/bin/python3
\ No newline at end of file
diff --git a/venv/bin/python3.10 b/venv/bin/python3.10
new file mode 120000
index 0000000..b8a0adb
--- /dev/null
+++ b/venv/bin/python3.10
@@ -0,0 +1 @@
+python3
\ No newline at end of file
diff --git a/venv/bin/tqdm b/venv/bin/tqdm
new file mode 100755
index 0000000..f05a26d
--- /dev/null
+++ b/venv/bin/tqdm
@@ -0,0 +1,8 @@
+#!/home/leo/Work/openai/JmedChat/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from tqdm.cli import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/bin/uvicorn b/venv/bin/uvicorn
new file mode 100755
index 0000000..7cdf688
--- /dev/null
+++ b/venv/bin/uvicorn
@@ -0,0 +1,8 @@
+#!/home/leo/Work/openai/JmedChat/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from uvicorn.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/include/site/python3.10/greenlet/greenlet.h b/venv/include/site/python3.10/greenlet/greenlet.h
new file mode 100644
index 0000000..830bef8
--- /dev/null
+++ b/venv/include/site/python3.10/greenlet/greenlet.h
@@ -0,0 +1,146 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+
+/* Greenlet object interface */
+
+#ifndef Py_GREENLETOBJECT_H
+#define Py_GREENLETOBJECT_H
+
+#include <Python.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* This is deprecated and undocumented. It does not change.
*/ +#define GREENLET_VERSION "1.0.0" + +typedef struct _greenlet { + PyObject_HEAD + char* stack_start; + char* stack_stop; + char* stack_copy; + intptr_t stack_saved; + struct _greenlet* stack_prev; + struct _greenlet* parent; + PyObject* run_info; + struct _frame* top_frame; + int recursion_depth; + PyObject* weakreflist; +#if PY_VERSION_HEX >= 0x030700A3 + _PyErr_StackItem* exc_info; + _PyErr_StackItem exc_state; +#else + PyObject* exc_type; + PyObject* exc_value; + PyObject* exc_traceback; +#endif + PyObject* dict; +#if PY_VERSION_HEX >= 0x030700A3 + PyObject* context; +#endif +#if PY_VERSION_HEX >= 0x30A00B1 + CFrame* cframe; +#endif +} PyGreenlet; + +#define PyGreenlet_Check(op) PyObject_TypeCheck(op, &PyGreenlet_Type) +#define PyGreenlet_MAIN(op) (((PyGreenlet*)(op))->stack_stop == (char*)-1) +#define PyGreenlet_STARTED(op) (((PyGreenlet*)(op))->stack_stop != NULL) +#define PyGreenlet_ACTIVE(op) (((PyGreenlet*)(op))->stack_start != NULL) +#define PyGreenlet_GET_PARENT(op) (((PyGreenlet*)(op))->parent) + +/* C API functions */ + +/* Total number of symbols that are exported */ +#define PyGreenlet_API_pointers 8 + +#define PyGreenlet_Type_NUM 0 +#define PyExc_GreenletError_NUM 1 +#define PyExc_GreenletExit_NUM 2 + +#define PyGreenlet_New_NUM 3 +#define PyGreenlet_GetCurrent_NUM 4 +#define PyGreenlet_Throw_NUM 5 +#define PyGreenlet_Switch_NUM 6 +#define PyGreenlet_SetParent_NUM 7 + +#ifndef GREENLET_MODULE +/* This section is used by modules that uses the greenlet C API */ +static void** _PyGreenlet_API = NULL; + +# define PyGreenlet_Type \ + (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) + +# define PyExc_GreenletError \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) + +# define PyExc_GreenletExit \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) + +/* + * PyGreenlet_New(PyObject *args) + * + * greenlet.greenlet(run, parent=None) + */ +# define PyGreenlet_New \ + (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ + _PyGreenlet_API[PyGreenlet_New_NUM]) + +/* + * PyGreenlet_GetCurrent(void) + * + * greenlet.getcurrent() + */ +# define PyGreenlet_GetCurrent \ + (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) + +/* + * PyGreenlet_Throw( + * PyGreenlet *greenlet, + * PyObject *typ, + * PyObject *val, + * PyObject *tb) + * + * g.throw(...) + */ +# define PyGreenlet_Throw \ + (*(PyObject * (*)(PyGreenlet * self, \ + PyObject * typ, \ + PyObject * val, \ + PyObject * tb)) \ + _PyGreenlet_API[PyGreenlet_Throw_NUM]) + +/* + * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) + * + * g.switch(*args, **kwargs) + */ +# define PyGreenlet_Switch \ + (*(PyObject * \ + (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ + _PyGreenlet_API[PyGreenlet_Switch_NUM]) + +/* + * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) + * + * g.parent = new_parent + */ +# define PyGreenlet_SetParent \ + (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ + _PyGreenlet_API[PyGreenlet_SetParent_NUM]) + +/* Macro that imports greenlet and initializes C API */ +/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we + keep the older definition to be sure older code that might have a copy of + the header still works. 
*/ +# define PyGreenlet_Import() \ + { \ + _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ + } + +#endif /* GREENLET_MODULE */ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_GREENLETOBJECT_H */ diff --git a/venv/lib/python3.10/site-packages/CHANGELOG.md b/venv/lib/python3.10/site-packages/CHANGELOG.md new file mode 100644 index 0000000..3781be9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/CHANGELOG.md @@ -0,0 +1,235 @@ +# Python-RSA changelog + +## Version 4.8 - in development + +- Switch to [Poetry](https://python-poetry.org/) for dependency and release management. +- Compatibility with Python 3.10. +- Chain exceptions using `raise new_exception from old_exception` + ([#157](https://github.com/sybrenstuvel/python-rsa/pull/157)) +- Added marker file for PEP 561. This will allow type checking tools in dependent projects + to use type annotations from Python-RSA + ([#136](https://github.com/sybrenstuvel/python-rsa/pull/136)). +- Use the Chinese Remainder Theorem when decrypting with a private key. This + makes decryption 2-4x faster + ([#163](https://github.com/sybrenstuvel/python-rsa/pull/163)). + +## Version 4.7.2 - released 2021-02-24 + +- Fix picking/unpickling issue introduced in 4.7 + ([#173](https://github.com/sybrenstuvel/python-rsa/issues/173)) + +## Version 4.7.1 - released 2021-02-15 + +- Fix threading issue introduced in 4.7 + ([#173](https://github.com/sybrenstuvel/python-rsa/issues/173)) + +## Version 4.7 - released 2021-01-10 + +- Fix [#165](https://github.com/sybrenstuvel/python-rsa/issues/165): + CVE-2020-25658 - Bleichenbacher-style timing oracle in PKCS#1 v1.5 decryption + code +- Add padding length check as described by PKCS#1 v1.5 (Fixes + [#164](https://github.com/sybrenstuvel/python-rsa/issues/164)) +- Reuse of blinding factors to speed up blinding operations. + Fixes [#162](https://github.com/sybrenstuvel/python-rsa/issues/162). +- Declare & test support for Python 3.9 + + +## Version 4.4 & 4.6 - released 2020-06-12 + +Version 4.4 and 4.6 are almost a re-tagged release of version 4.2. It requires +Python 3.5+. To avoid older Python installations from trying to upgrade to RSA +4.4, this is now made explicit in the `python_requires` argument in `setup.py`. +There was a mistake releasing 4.4 as "3.5+ only", which made it necessary to +retag 4.4 as 4.6 as well. + +No functional changes compared to version 4.2. + + +## Version 4.3 & 4.5 - released 2020-06-12 + +Version 4.3 and 4.5 are almost a re-tagged release of version 4.0. It is the +last to support Python 2.7. This is now made explicit in the `python_requires` +argument in `setup.py`. Python 3.4 is not supported by this release. There was a +mistake releasing 4.4 as "3.5+ only", which made it necessary to retag 4.3 as +4.5 as well. + +Two security fixes have also been backported, so 4.3 = 4.0 + these two fixes. + +- Choose blinding factor relatively prime to N. Thanks Christian Heimes for pointing this out. +- Reject cyphertexts (when decrypting) and signatures (when verifying) that have + been modified by prepending zero bytes. This resolves CVE-2020-13757. Thanks + Carnil for pointing this out. + + +## Version 4.2 - released 2020-06-10 + +- Rolled back the switch to Poetry, and reverted back to using Pipenv + setup.py + for dependency management. There apparently is an issue no-binary installs of + packages build with Poetry. This fixes + [#148](https://github.com/sybrenstuvel/python-rsa/issues/148) +- Limited SHA3 support to those Python versions (3.6+) that support it natively. 
+ The third-party library that adds support for this to Python 3.5 is a binary + package, and thus breaks the pure-Python nature of Python-RSA. + This should fix [#147](https://github.com/sybrenstuvel/python-rsa/issues/147). + + +## Version 4.1 - released 2020-06-10 + +- Added support for Python 3.8. +- Dropped support for Python 2 and 3.4. +- Added type annotations to the source code. This will make Python-RSA easier to use in + your IDE, and allows better type checking. +- Added static type checking via [MyPy](http://mypy-lang.org/). +- Fix [#129](https://github.com/sybrenstuvel/python-rsa/issues/129) Installing from source + gives UnicodeDecodeError. +- Switched to using [Poetry](https://poetry.eustace.io/) for package + management. +- Added support for SHA3 hashing: SHA3-256, SHA3-384, SHA3-512. This + is natively supported by Python 3.6+ and supported via a third-party + library on Python 3.5. +- Choose blinding factor relatively prime to N. Thanks Christian Heimes for pointing this out. +- Reject cyphertexts (when decrypting) and signatures (when verifying) that have + been modified by prepending zero bytes. This resolves CVE-2020-13757. Thanks + Adelapie for pointing this out. + + +## Version 4.0 - released 2018-09-16 + +- Removed deprecated modules: + - rsa.varblock + - rsa.bigfile + - rsa._version133 + - rsa._version200 +- Removed CLI commands that use the VARBLOCK/bigfile format. +- Ensured that PublicKey.save_pkcs1() and PrivateKey.save_pkcs1() always return bytes. +- Dropped support for Python 2.6 and 3.3. +- Dropped support for Psyco. +- Miller-Rabin iterations determined by bitsize of key. + [#58](https://github.com/sybrenstuvel/python-rsa/pull/58) +- Added function `rsa.find_signature_hash()` to return the name of the hashing + algorithm used to sign a message. `rsa.verify()` now also returns that name, + instead of always returning `True`. + [#78](https://github.com/sybrenstuvel/python-rsa/issues/13) +- Add support for SHA-224 for PKCS1 signatures. + [#104](https://github.com/sybrenstuvel/python-rsa/pull/104) +- Transitioned from `requirements.txt` to Pipenv for package management. + + +## Version 3.4.2 - released 2016-03-29 + +- Fixed dates in CHANGELOG.txt + + +## Version 3.4.1 - released 2016-03-26 + +- Included tests/private.pem in MANIFEST.in +- Included README.md and CHANGELOG.txt in MANIFEST.in + + +## Version 3.4 - released 2016-03-17 + +- Moved development to GitHub: https://github.com/sybrenstuvel/python-rsa +- Solved side-channel vulnerability by implementing blinding, fixes #19 +- Deprecated the VARBLOCK format and rsa.bigfile module due to security issues, see + https://github.com/sybrenstuvel/python-rsa/issues/13 +- Integration with Travis-CI [1], Coveralls [2] and Code Climate [3] +- Deprecated the old rsa._version133 and rsa._version200 submodules, they will be + completely removed in version 4.0. +- Add an 'exponent' argument to key.newkeys() +- Switched from Solovay-Strassen to Miller-Rabin primality testing, to + comply with NIST FIPS 186-4 [4] as probabilistic primality test + (Appendix C, subsection C.3): +- Fixed bugs #12, #14, #27, #30, #49 + +[1] https://travis-ci.org/sybrenstuvel/python-rsa +[2] https://coveralls.io/github/sybrenstuvel/python-rsa +[3] https://codeclimate.com/github/sybrenstuvel/python-rsa +[4] http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + + +## Version 3.3 - released 2016-01-13 + +- Thanks to Filippo Valsorda: Fix BB'06 attack in verify() by + switching from parsing to comparison. See [1] for more information. 
+- Simplified Tox configuration and dropped Python 3.2 support. The + coverage package uses a u'' prefix, which was reintroduced in 3.3 + for ease of porting. + +[1] https://blog.filippo.io/bleichenbacher-06-signature-forgery-in-python-rsa/ + + +## Version 3.2.3 - released 2015-11-05 + +- Added character encoding markers for Python 2.x + + +## Version 3.2.1 - released 2015-11-05 + +- Added per-file licenses +- Added support for wheel packages +- Made example code more consistent and up to date with Python 3.4 + + +## Version 3.2 - released 2015-07-29 + +- Mentioned support for Python 3 in setup.py + + +## Version 3.1.4 - released 2014-02-22 + +- Fixed some bugs + + +## Version 3.1.3 - released 2014-02-02 + +- Dropped support for Python 2.5 + + +## Version 3.1.2 - released 2013-09-15 + +- Added Python 3.3 to the test environment. +- Removed dependency on Distribute +- Added support for loading public keys from OpenSSL + + +## Version 3.1.1 - released 2012-06-18 + +- Fixed doctests for Python 2.7 +- Removed obsolete unittest so all tests run fine on Python 3.2 + +## Version 3.1 - released 2012-06-17 + +- Big, big credits to Yesudeep Mangalapilly for all the changes listed + below! +- Added ability to generate keys on multiple cores simultaneously. +- Massive speedup +- Partial Python 3.2 compatibility (core functionality works, but + saving or loading keys doesn't, for that the pyasn1 package needs to + be ported to Python 3 first) +- Lots of bug fixes + + + +## Version 3.0.1 - released 2011-08-07 + +- Removed unused import of abc module + + +## Version 3.0 - released 2011-08-05 + +- Changed the meaning of the keysize to mean the size of ``n`` rather than + the size of both ``p`` and ``q``. This is the common interpretation of + RSA keysize. To get the old behaviour, double the keysize when generating a + new key. +- Added a lot of doctests +- Added random-padded encryption and decryption using PKCS#1 version 1.5 +- Added hash-based signatures and verification using PKCS#1v1.5 +- Modeling private and public key as real objects rather than dicts. +- Support for saving and loading keys as PEM and DER files. +- Ability to extract a public key from a private key (PEM+DER) + + +## Version 2.0 + +- Security improvements by Barry Mead. diff --git a/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/LICENSE.rst b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/LICENSE.rst new file mode 100644 index 0000000..191ddaf --- /dev/null +++ b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/LICENSE.rst @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 Laurent LAPORTE + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/METADATA b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/METADATA new file mode 100644 index 0000000..b588a55 --- /dev/null +++ b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/METADATA @@ -0,0 +1,187 @@ +Metadata-Version: 2.1 +Name: Deprecated +Version: 1.2.13 +Summary: Python @deprecated decorator to deprecate old python classes, functions or methods. +Home-page: https://github.com/tantale/deprecated +Author: Laurent LAPORTE +Author-email: tantale.solutions@gmail.com +License: MIT +Project-URL: Documentation, https://deprecated.readthedocs.io/en/latest/ +Project-URL: Source, https://github.com/tantale/deprecated +Project-URL: Bug Tracker, https://github.com/tantale/deprecated/issues +Keywords: deprecate,deprecated,deprecation,warning,warn,decorator +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: wrapt (<2,>=1.10) +Provides-Extra: dev +Requires-Dist: tox ; extra == 'dev' +Requires-Dist: bump2version (<1) ; extra == 'dev' +Requires-Dist: sphinx (<2) ; extra == 'dev' +Requires-Dist: importlib-metadata (<3) ; (python_version < "3") and extra == 'dev' +Requires-Dist: importlib-resources (<4) ; (python_version < "3") and extra == 'dev' +Requires-Dist: configparser (<5) ; (python_version < "3") and extra == 'dev' +Requires-Dist: sphinxcontrib-websupport (<2) ; (python_version < "3") and extra == 'dev' +Requires-Dist: zipp (<2) ; (python_version < "3") and extra == 'dev' +Requires-Dist: PyTest (<5) ; (python_version < "3.6") and extra == 'dev' +Requires-Dist: PyTest-Cov (<2.6) ; (python_version < "3.6") and extra == 'dev' +Requires-Dist: PyTest ; (python_version >= "3.6") and extra == 'dev' +Requires-Dist: PyTest-Cov ; (python_version >= "3.6") and extra == 'dev' + + +Deprecated Library +------------------ + +Deprecated is Easy to Use +````````````````````````` + +If you need to mark a function or a method as deprecated, +you can use the ``@deprecated`` decorator: + +Save in a hello.py: + +.. 
code:: python + + from deprecated import deprecated + + + @deprecated(version='1.2.1', reason="You should use another function") + def some_old_function(x, y): + return x + y + + + class SomeClass(object): + @deprecated(version='1.3.0', reason="This method is deprecated") + def some_old_method(self, x, y): + return x + y + + + some_old_function(12, 34) + obj = SomeClass() + obj.some_old_method(5, 8) + + +And Easy to Setup +````````````````` + +And run it: + +.. code:: bash + + $ pip install Deprecated + $ python hello.py + hello.py:15: DeprecationWarning: Call to deprecated function (or staticmethod) some_old_function. + (You should use another function) -- Deprecated since version 1.2.0. + some_old_function(12, 34) + hello.py:17: DeprecationWarning: Call to deprecated method some_old_method. + (This method is deprecated) -- Deprecated since version 1.3.0. + obj.some_old_method(5, 8) + + +You can document your code +`````````````````````````` + +Have you ever wonder how to document that some functions, classes, methods, etc. are deprecated? +This is now possible with the integrated Sphinx directives: + +For instance, in hello_sphinx.py: + +.. code:: python + + from deprecated.sphinx import deprecated + from deprecated.sphinx import versionadded + from deprecated.sphinx import versionchanged + + + @versionadded(version='1.0', reason="This function is new") + def function_one(): + '''This is the function one''' + + + @versionchanged(version='1.0', reason="This function is modified") + def function_two(): + '''This is the function two''' + + + @deprecated(version='1.0', reason="This function will be removed soon") + def function_three(): + '''This is the function three''' + + + function_one() + function_two() + function_three() # warns + + help(function_one) + help(function_two) + help(function_three) + + +The result it immediate +``````````````````````` + +Run it: + +.. code:: bash + + $ python hello_sphinx.py + + hello_sphinx.py:23: DeprecationWarning: Call to deprecated function (or staticmethod) function_three. + (This function will be removed soon) -- Deprecated since version 1.0. + function_three() # warns + + Help on function function_one in module __main__: + + function_one() + This is the function one + + .. versionadded:: 1.0 + This function is new + + Help on function function_two in module __main__: + + function_two() + This is the function two + + .. versionchanged:: 1.0 + This function is modified + + Help on function function_three in module __main__: + + function_three() + This is the function three + + .. 
deprecated:: 1.0 + This function will be removed soon + + +Links +````` + +* `Python package index (PyPi) `_ +* `GitHub website `_ +* `Read The Docs `_ +* `EBook on Lulu.com `_ +* `StackOverFlow Q&A `_ +* `Development version + `_ + + + diff --git a/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/RECORD b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/RECORD new file mode 100644 index 0000000..3436ea4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/RECORD @@ -0,0 +1,12 @@ +Deprecated-1.2.13.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Deprecated-1.2.13.dist-info/LICENSE.rst,sha256=HoPt0VvkGbXVveNy4yXlJ_9PmRX1SOfHUxS0H2aZ6Dw,1081 +Deprecated-1.2.13.dist-info/METADATA,sha256=3yaMBaEEx4K_RPhU7Bmb8P_UprR9-67MvES0XRGK7go,5817 +Deprecated-1.2.13.dist-info/RECORD,, +Deprecated-1.2.13.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 +Deprecated-1.2.13.dist-info/top_level.txt,sha256=nHbOYawKPQQE5lQl-toUB1JBRJjUyn_m_Mb8RVJ0RjA,11 +deprecated/__init__.py,sha256=YA_PiKdeI1jWCt2FxdMLc9wFZ_DDZFl0OlI6jf6mdeo,349 +deprecated/__pycache__/__init__.cpython-310.pyc,, +deprecated/__pycache__/classic.cpython-310.pyc,, +deprecated/__pycache__/sphinx.cpython-310.pyc,, +deprecated/classic.py,sha256=QugmUi7IhBvp2nDvMtyWqFDPRR43-9nfSZG1ZJSDpFM,9880 +deprecated/sphinx.py,sha256=zrgb7gbK4iixgRbO_AXusI8_Gi4JwrQJZHs3RZArdfE,9988 diff --git a/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/WHEEL b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/WHEEL new file mode 100644 index 0000000..8b701e9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.6) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/top_level.txt new file mode 100644 index 0000000..9f8d550 --- /dev/null +++ b/venv/lib/python3.10/site-packages/Deprecated-1.2.13.dist-info/top_level.txt @@ -0,0 +1 @@ +deprecated diff --git a/venv/lib/python3.10/site-packages/LICENSE b/venv/lib/python3.10/site-packages/LICENSE new file mode 100644 index 0000000..67589cb --- /dev/null +++ b/venv/lib/python3.10/site-packages/LICENSE @@ -0,0 +1,13 @@ +Copyright 2011 Sybren A. Stüvel + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/LICENSE b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/LICENSE new file mode 100644 index 0000000..86b18e1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2010, 2013 PyMySQL contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/METADATA b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/METADATA new file mode 100644 index 0000000..67a732b --- /dev/null +++ b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/METADATA @@ -0,0 +1,180 @@ +Metadata-Version: 2.1 +Name: PyMySQL +Version: 1.0.2 +Summary: Pure Python MySQL Driver +Home-page: https://github.com/PyMySQL/PyMySQL/ +Author: yutaka.matsubara +Author-email: yutaka.matsubara@gmail.com +Maintainer: Inada Naoki +Maintainer-email: songofacandy@gmail.com +License: "MIT" +Project-URL: Documentation, https://pymysql.readthedocs.io/ +Keywords: MySQL +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Database +Requires-Python: >=3.6 +Provides-Extra: ed25519 +Requires-Dist: PyNaCl (>=1.4.0) ; extra == 'ed25519' +Provides-Extra: rsa +Requires-Dist: cryptography ; extra == 'rsa' + +.. image:: https://readthedocs.org/projects/pymysql/badge/?version=latest + :target: https://pymysql.readthedocs.io/ + :alt: Documentation Status + +.. image:: https://coveralls.io/repos/PyMySQL/PyMySQL/badge.svg?branch=master&service=github + :target: https://coveralls.io/github/PyMySQL/PyMySQL?branch=master + +.. 
image:: https://img.shields.io/lgtm/grade/python/g/PyMySQL/PyMySQL.svg?logo=lgtm&logoWidth=18 + :target: https://lgtm.com/projects/g/PyMySQL/PyMySQL/context:python + + +PyMySQL +======= + +.. contents:: Table of Contents + :local: + +This package contains a pure-Python MySQL client library, based on `PEP 249`_. + +Most public APIs are compatible with mysqlclient and MySQLdb. + +NOTE: PyMySQL doesn't support low level APIs `_mysql` provides like `data_seek`, +`store_result`, and `use_result`. You should use high level APIs defined in `PEP 249`_. +But some APIs like `autocommit` and `ping` are supported because `PEP 249`_ doesn't cover +their usecase. + +.. _`PEP 249`: https://www.python.org/dev/peps/pep-0249/ + + +Requirements +------------- + +* Python -- one of the following: + + - CPython_ : 3.6 and newer + - PyPy_ : Latest 3.x version + +* MySQL Server -- one of the following: + + - MySQL_ >= 5.6 + - MariaDB_ >= 10.0 + +.. _CPython: https://www.python.org/ +.. _PyPy: https://pypy.org/ +.. _MySQL: https://www.mysql.com/ +.. _MariaDB: https://mariadb.org/ + + +Installation +------------ + +Package is uploaded on `PyPI `_. + +You can install it with pip:: + + $ python3 -m pip install PyMySQL + +To use "sha256_password" or "caching_sha2_password" for authenticate, +you need to install additional dependency:: + + $ python3 -m pip install PyMySQL[rsa] + +To use MariaDB's "ed25519" authentication method, you need to install +additional dependency:: + + $ python3 -m pip install PyMySQL[ed25519] + + +Documentation +------------- + +Documentation is available online: https://pymysql.readthedocs.io/ + +For support, please refer to the `StackOverflow +`_. + + +Example +------- + +The following examples make use of a simple table + +.. code:: sql + + CREATE TABLE `users` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `email` varchar(255) COLLATE utf8_bin NOT NULL, + `password` varchar(255) COLLATE utf8_bin NOT NULL, + PRIMARY KEY (`id`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin + AUTO_INCREMENT=1 ; + + +.. code:: python + + import pymysql.cursors + + # Connect to the database + connection = pymysql.connect(host='localhost', + user='user', + password='passwd', + database='db', + cursorclass=pymysql.cursors.DictCursor) + + with connection: + with connection.cursor() as cursor: + # Create a new record + sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)" + cursor.execute(sql, ('webmaster@python.org', 'very-secret')) + + # connection is not autocommit by default. So you must commit to save + # your changes. + connection.commit() + + with connection.cursor() as cursor: + # Read a single record + sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s" + cursor.execute(sql, ('webmaster@python.org',)) + result = cursor.fetchone() + print(result) + + +This example will print: + +.. code:: python + + {'password': 'very-secret', 'id': 1} + + +Resources +--------- + +* DB-API 2.0: https://www.python.org/dev/peps/pep-0249/ + +* MySQL Reference Manuals: https://dev.mysql.com/doc/ + +* MySQL client/server protocol: + https://dev.mysql.com/doc/internals/en/client-server-protocol.html + +* "Connector" channel in MySQL Community Slack: + https://lefred.be/mysql-community-on-slack/ + +* PyMySQL mailing list: https://groups.google.com/forum/#!forum/pymysql-users + +License +------- + +PyMySQL is released under the MIT License. See LICENSE for more information. 
+ + diff --git a/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/RECORD b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/RECORD new file mode 100644 index 0000000..eab1dd9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/RECORD @@ -0,0 +1,43 @@ +PyMySQL-1.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +PyMySQL-1.0.2.dist-info/LICENSE,sha256=MUEg3GXwgA9ziksxQAx27hTezR--d86cNUCkIbhup7Y,1070 +PyMySQL-1.0.2.dist-info/METADATA,sha256=hz4Fdo8sOFKcNqZ8wp4Bp-txNCOBCnw9-leYR7QBZ5I,5119 +PyMySQL-1.0.2.dist-info/RECORD,, +PyMySQL-1.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +PyMySQL-1.0.2.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +PyMySQL-1.0.2.dist-info/top_level.txt,sha256=IKlV-f4o90sOdnMd6HBvo0l2nqfJOGUzkwZeaEEGuRg,8 +pymysql/__init__.py,sha256=XL7skPUK4cbKiek68T0vMob-L4YkIRLb2KX4hdMZVvM,4391 +pymysql/__pycache__/__init__.cpython-310.pyc,, +pymysql/__pycache__/_auth.cpython-310.pyc,, +pymysql/__pycache__/charset.cpython-310.pyc,, +pymysql/__pycache__/connections.cpython-310.pyc,, +pymysql/__pycache__/converters.cpython-310.pyc,, +pymysql/__pycache__/cursors.cpython-310.pyc,, +pymysql/__pycache__/err.cpython-310.pyc,, +pymysql/__pycache__/optionfile.cpython-310.pyc,, +pymysql/__pycache__/protocol.cpython-310.pyc,, +pymysql/__pycache__/times.cpython-310.pyc,, +pymysql/_auth.py,sha256=l1VtBwDpCtTkalgYQFASO-rj-vEd3DGYR8g-eQjNF1U,7399 +pymysql/charset.py,sha256=JCvshFnNf4vzkpXc6uPCyg07qGNfZaVZoxrFqzVlKFs,10293 +pymysql/connections.py,sha256=EwKWqFIWlx6kbOeDFIhMFpjJ9-pyF140E5ouKgrrYfY,51251 +pymysql/constants/CLIENT.py,sha256=SSvMFPZCTVMU1UWa4zOrfhYMDdR2wG2mS0E5GzJhDsg,878 +pymysql/constants/COMMAND.py,sha256=TGITAUcNWlq2Gwg2wv5UK2ykdTd4LYTk_EcJJOCpGIc,679 +pymysql/constants/CR.py,sha256=oHyD9dnR1DUX7hd42rcamMnFrWhjUZz7E4S6qQWSQb4,1927 +pymysql/constants/ER.py,sha256=cH5wgU-e70wd0uSygNR5IFCnnXcrR9WLwJPMH22bhUw,12296 +pymysql/constants/FIELD_TYPE.py,sha256=ytFzgAnGmb9hvdsBlnK68qdZv_a6jYFIXT6VSAb60z8,370 +pymysql/constants/FLAG.py,sha256=Fy-PrCLnUI7fx_o5WypYnUAzWAM0E9d5yL8fFRVKffY,214 +pymysql/constants/SERVER_STATUS.py,sha256=m28Iq5JGCFCWLhafE73-iOvw_9gDGqnytW3NkHpbugA,333 +pymysql/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pymysql/constants/__pycache__/CLIENT.cpython-310.pyc,, +pymysql/constants/__pycache__/COMMAND.cpython-310.pyc,, +pymysql/constants/__pycache__/CR.cpython-310.pyc,, +pymysql/constants/__pycache__/ER.cpython-310.pyc,, +pymysql/constants/__pycache__/FIELD_TYPE.cpython-310.pyc,, +pymysql/constants/__pycache__/FLAG.cpython-310.pyc,, +pymysql/constants/__pycache__/SERVER_STATUS.cpython-310.pyc,, +pymysql/constants/__pycache__/__init__.cpython-310.pyc,, +pymysql/converters.py,sha256=MBXTOCXSyewMculaRliBEzPVkOKXLiRMqvIXih9Akrg,9430 +pymysql/cursors.py,sha256=1E79f3vysxygyfZMhvR6-yFDfysRn3Go8xZTywteh4o,15366 +pymysql/err.py,sha256=bpxayM4IUnFQAd8bUZ3PFsFomi9QSfBk-0TJXyKU2FI,3773 +pymysql/optionfile.py,sha256=ehPrZW4d7pcEvXGAEpsKgLdXpFnIQD93yF7T_jHjoRk,573 +pymysql/protocol.py,sha256=Ur8xXkVvyFc6m5CA34QrHBasADvS_NPFsWU-Q3flRYA,11859 +pymysql/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360 diff --git a/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/REQUESTED b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/WHEEL 
b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/WHEEL new file mode 100644 index 0000000..385faab --- /dev/null +++ b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/top_level.txt new file mode 100644 index 0000000..d4a7eda --- /dev/null +++ b/venv/lib/python3.10/site-packages/PyMySQL-1.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +pymysql diff --git a/venv/lib/python3.10/site-packages/README.md b/venv/lib/python3.10/site-packages/README.md new file mode 100644 index 0000000..02761da --- /dev/null +++ b/venv/lib/python3.10/site-packages/README.md @@ -0,0 +1,43 @@ +# Pure Python RSA implementation + +[![PyPI](https://img.shields.io/pypi/v/rsa.svg)](https://pypi.org/project/rsa/) +[![Build Status](https://travis-ci.org/sybrenstuvel/python-rsa.svg?branch=master)](https://travis-ci.org/sybrenstuvel/python-rsa) +[![Coverage Status](https://coveralls.io/repos/github/sybrenstuvel/python-rsa/badge.svg?branch=master)](https://coveralls.io/github/sybrenstuvel/python-rsa?branch=master) +[![Code Climate](https://api.codeclimate.com/v1/badges/a99a88d28ad37a79dbf6/maintainability)](https://codeclimate.com/github/codeclimate/codeclimate/maintainability) + +[Python-RSA](https://stuvel.eu/rsa) is a pure-Python RSA implementation. It supports +encryption and decryption, signing and verifying signatures, and key +generation according to PKCS#1 version 1.5. It can be used as a Python +library as well as on the command line. The code was mostly written by +Sybren A. Stüvel. + +Documentation can be found at the [Python-RSA homepage](https://stuvel.eu/rsa). For all changes, check [the changelog](https://github.com/sybrenstuvel/python-rsa/blob/master/CHANGELOG.md). + +Download and install using: + + pip install rsa + +or download it from the [Python Package Index](https://pypi.org/project/rsa/). + +The source code is maintained at [GitHub](https://github.com/sybrenstuvel/python-rsa/) and is +licensed under the [Apache License, version 2.0](https://www.apache.org/licenses/LICENSE-2.0). + +## Security + +Because of how Python internally stores numbers, it is very hard (if not impossible) to make a pure-Python program secure against timing attacks. This library is no exception, so use it with care. See https://securitypitfalls.wordpress.com/2018/08/03/constant-time-compare-in-python/ for more info. + +## Setup of Development Environment + +``` +python3 -m venv .venv +. ./.venv/bin/activate +pip install poetry +poetry install +``` + +## Publishing a New Release + +``` +. ./.venv/bin/activate +poetry publish --build +``` diff --git a/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/LICENSE b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/LICENSE new file mode 100644 index 0000000..c933e4b --- /dev/null +++ b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright 2005-2022 SQLAlchemy authors and contributors <see AUTHORS file>.
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/METADATA b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/METADATA new file mode 100644 index 0000000..ea510d7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/METADATA @@ -0,0 +1,240 @@ +Metadata-Version: 2.1 +Name: SQLAlchemy +Version: 1.4.36 +Summary: Database Abstraction Library +Home-page: https://www.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://docs.sqlalchemy.org +Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: importlib-metadata ; python_version < "3.8" +Requires-Dist: greenlet (!=0.4.17) ; python_version >= "3" and (platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32")))))) +Provides-Extra: aiomysql +Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'aiomysql' +Requires-Dist: aiomysql ; (python_version >= "3") and extra == 'aiomysql' +Provides-Extra: aiosqlite +Requires-Dist: typing-extensions (!=3.10.0.1) ; extra == 'aiosqlite' +Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'aiosqlite' +Requires-Dist: aiosqlite ; (python_version >= "3") and extra == 'aiosqlite' +Provides-Extra: asyncio +Requires-Dist: greenlet (!=0.4.17) ; 
(python_version >= "3") and extra == 'asyncio' +Provides-Extra: asyncmy +Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'asyncmy' +Requires-Dist: asyncmy (!=0.2.4,>=0.2.3) ; (python_version >= "3") and extra == 'asyncmy' +Provides-Extra: mariadb_connector +Requires-Dist: mariadb (>=1.0.1) ; (python_version >= "3") and extra == 'mariadb_connector' +Provides-Extra: mssql +Requires-Dist: pyodbc ; extra == 'mssql' +Provides-Extra: mssql_pymssql +Requires-Dist: pymssql ; extra == 'mssql_pymssql' +Provides-Extra: mssql_pyodbc +Requires-Dist: pyodbc ; extra == 'mssql_pyodbc' +Provides-Extra: mypy +Requires-Dist: sqlalchemy2-stubs ; extra == 'mypy' +Requires-Dist: mypy (>=0.910) ; (python_version >= "3") and extra == 'mypy' +Provides-Extra: mysql +Requires-Dist: mysqlclient (<2,>=1.4.0) ; (python_version < "3") and extra == 'mysql' +Requires-Dist: mysqlclient (>=1.4.0) ; (python_version >= "3") and extra == 'mysql' +Provides-Extra: mysql_connector +Requires-Dist: mysql-connector-python ; extra == 'mysql_connector' +Provides-Extra: oracle +Requires-Dist: cx-oracle (<8,>=7) ; (python_version < "3") and extra == 'oracle' +Requires-Dist: cx-oracle (>=7) ; (python_version >= "3") and extra == 'oracle' +Provides-Extra: postgresql +Requires-Dist: psycopg2 (>=2.7) ; extra == 'postgresql' +Provides-Extra: postgresql_asyncpg +Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'postgresql_asyncpg' +Requires-Dist: asyncpg ; (python_version >= "3") and extra == 'postgresql_asyncpg' +Provides-Extra: postgresql_pg8000 +Requires-Dist: pg8000 (>=1.16.6) ; extra == 'postgresql_pg8000' +Provides-Extra: postgresql_psycopg2binary +Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary' +Provides-Extra: postgresql_psycopg2cffi +Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi' +Provides-Extra: pymysql +Requires-Dist: pymysql (<1) ; (python_version < "3") and extra == 'pymysql' +Requires-Dist: pymysql ; (python_version >= "3") and extra == 'pymysql' +Provides-Extra: sqlcipher +Requires-Dist: sqlcipher3-binary ; (python_version >= "3") and extra == 'sqlcipher' + +SQLAlchemy +========== + +|PyPI| |Python| |Downloads| + +.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy + :target: https://pypi.org/project/sqlalchemy + :alt: PyPI + +.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy + :target: https://pypi.org/project/sqlalchemy + :alt: PyPI - Python Version + +.. |Downloads| image:: https://img.shields.io/pypi/dm/sqlalchemy + :target: https://pypi.org/project/sqlalchemy + :alt: PyPI - Downloads + + +The Python SQL Toolkit and Object Relational Mapper + +Introduction +------------- + +SQLAlchemy is the Python SQL toolkit and Object Relational Mapper +that gives application developers the full power and +flexibility of SQL. SQLAlchemy provides a full suite +of well known enterprise-level persistence patterns, +designed for efficient and high-performing database +access, adapted into a simple and Pythonic domain +language. + +Major SQLAlchemy features include: + +* An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. 
+* A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. +* A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. +* A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. +* All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. +* Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. + +SQLAlchemy's philosophy: + +* SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. +* An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. +* The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. +* With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation are used, and which + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer-initiated + decision. +* Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. +* Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. +* Never render a literal value in a SQL statement.
+ Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + +Documentation +------------- + +Latest documentation is at: + +https://www.sqlalchemy.org/docs/ + +Installation / Requirements +--------------------------- + +Full documentation for installation is at +`Installation `_. + +Getting Help / Development / Bug reporting +------------------------------------------ + +Please refer to the `SQLAlchemy Community Guide `_. + +Code of Conduct +--------------- + +Above all, SQLAlchemy places great emphasis on polite, thoughtful, and +constructive communication between users and developers. +Please see our current Code of Conduct at +`Code of Conduct `_. + +License +------- + +SQLAlchemy is distributed under the `MIT license +`_. + + + diff --git a/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/RECORD b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/RECORD new file mode 100644 index 0000000..f6a00e3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/RECORD @@ -0,0 +1,486 @@ +SQLAlchemy-1.4.36.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +SQLAlchemy-1.4.36.dist-info/LICENSE,sha256=hZ3tJdo0wetz5uc230xfjOPtLtUpBmMXbwbncg2cmiA,1100 +SQLAlchemy-1.4.36.dist-info/METADATA,sha256=ISQSiC_qJZd9sA4uYJPOlXygeWCjK2__a0JxIGNt3nA,9975 +SQLAlchemy-1.4.36.dist-info/RECORD,, +SQLAlchemy-1.4.36.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +SQLAlchemy-1.4.36.dist-info/WHEEL,sha256=Ti9SXxRhD2A7sSJ6_NquJXKHehk9vD0kuf_oco9X6vo,225 +SQLAlchemy-1.4.36.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11 +sqlalchemy/__init__.py,sha256=XCjI1gBSqxxpIOWqamwQgqGLUN2dOfs6rsTdBfxJ7NQ,4114 +sqlalchemy/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/__pycache__/events.cpython-310.pyc,, +sqlalchemy/__pycache__/exc.cpython-310.pyc,, +sqlalchemy/__pycache__/inspection.cpython-310.pyc,, +sqlalchemy/__pycache__/log.cpython-310.pyc,, +sqlalchemy/__pycache__/processors.cpython-310.pyc,, +sqlalchemy/__pycache__/schema.cpython-310.pyc,, +sqlalchemy/__pycache__/types.cpython-310.pyc,, +sqlalchemy/cimmutabledict.cpython-310-x86_64-linux-gnu.so,sha256=3vAtMBzVIuWruD5LZgekTthf1dmGt3SLcwGJM11Y75M,54448 +sqlalchemy/connectors/__init__.py,sha256=2m_LPZFkNExkoaTw14fRActQCcyFl7W81WeYj2O10lM,279 +sqlalchemy/connectors/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/connectors/__pycache__/mxodbc.cpython-310.pyc,, +sqlalchemy/connectors/__pycache__/pyodbc.cpython-310.pyc,, +sqlalchemy/connectors/mxodbc.py,sha256=CApFVkPEL8amXL5HKcG83jU9RbbVg0EQSyxceLWh260,5784 +sqlalchemy/connectors/pyodbc.py,sha256=r3aC-g4qKHoTZvW5cOEavKaEHIUKfMq2tWOaukHPXjo,6825 +sqlalchemy/cprocessors.cpython-310-x86_64-linux-gnu.so,sha256=VHpnVHwv9f4o2xMMXDpYX-ukKlYkXEGTEWDtyhzwKSc,61744 +sqlalchemy/cresultproxy.cpython-310-x86_64-linux-gnu.so,sha256=_wuO239fVrxqIxjGrbRwkqzqK3FLZV0vci-CJWEsVsM,92920 +sqlalchemy/databases/__init__.py,sha256=LAm4NHQgjg4sdCED02wUiZj9_0fKBEkStYtqvLWHArk,1010 +sqlalchemy/databases/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/dialects/__init__.py,sha256=52RcDU2JGS1nW2OHx2nIJ1B_IBI4puWFx09th8Hg-D0,2085 +sqlalchemy/dialects/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/dialects/firebird/__init__.py,sha256=iZH9WTMjUcsAf6Rl6-64CkcoLOixitP45TSZVSBQYL4,1153 +sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-310.pyc,, 
+sqlalchemy/dialects/firebird/__pycache__/base.cpython-310.pyc,, +sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-310.pyc,, +sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-310.pyc,, +sqlalchemy/dialects/firebird/base.py,sha256=P0ycKcsMKJyglm6uikAVDSc_7UV0NPSIU7hL58HQaog,31171 +sqlalchemy/dialects/firebird/fdb.py,sha256=lQhO8S1P8PjUeEW3NXCC1vqNp1DGzBQIUN2eIi-fCC0,4116 +sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=2_RZGXSw12FCEeZW0cXxbaR2Bl7GfMd7gGg5pgUiFzg,6479 +sqlalchemy/dialects/mssql/__init__.py,sha256=fvIR7jRTPH_4HellLg2kjwYIA3HM_jpNWSw9De0JciE,1788 +sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/dialects/mssql/__pycache__/base.cpython-310.pyc,, +sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-310.pyc,, +sqlalchemy/dialects/mssql/__pycache__/json.cpython-310.pyc,, +sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-310.pyc,, +sqlalchemy/dialects/mssql/__pycache__/provision.cpython-310.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-310.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-310.pyc,, +sqlalchemy/dialects/mssql/base.py,sha256=NQUTZrQWNqrwAm_CMNn2oVf6z8oRq9uxr3FQtIf15Y8,115369 +sqlalchemy/dialects/mssql/information_schema.py,sha256=R0xpK7xppti2ToGahDksb9jHy9R9MyHTwCfgeNvw3BQ,7584 +sqlalchemy/dialects/mssql/json.py,sha256=K1RqVl5bslYyVMtk5CWGjRV_I4K1sszXjx2F_nbCVWI,4558 +sqlalchemy/dialects/mssql/mxodbc.py,sha256=HPIxqFtSUY9Ugz-ebNb2T_sLoLp4rQi7qrmezsIYIsM,4808 +sqlalchemy/dialects/mssql/provision.py,sha256=m7ofLZYZinDS91Vgs42fK7dhJNnH-J_Bw2x_tP59tCc,4255 +sqlalchemy/dialects/mssql/pymssql.py,sha256=smbS466-7-cr1o2VBRqkTHlfczy_UIycMgM4uerI5Xw,4843 +sqlalchemy/dialects/mssql/pyodbc.py,sha256=AYq6WD5LGJG3_eP38B-RanJrAWdg5IavdidKTKHilR8,22985 +sqlalchemy/dialects/mysql/__init__.py,sha256=4C8GY2nAGQOrdGj3CseZqF4NR-CkhVZ_CgXFoskGAJs,2190 +sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/base.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/dml.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/expression.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/json.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/provision.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-310.pyc,, +sqlalchemy/dialects/mysql/__pycache__/types.cpython-310.pyc,, +sqlalchemy/dialects/mysql/aiomysql.py,sha256=Xqfr0SjvUu-qQZgrDLBnxo4dRQF9ZrI6tpc4HgiXENE,9609 +sqlalchemy/dialects/mysql/asyncmy.py,sha256=D8slHiFP3hOvwxf8zMY_-72V1owEhnpO0LmQdkz4n4M,9885 +sqlalchemy/dialects/mysql/base.py,sha256=lz_NPjxjCargEjLTgafVyHUFkCUdX0_ZGY-v9ZoAFXg,114949 
+sqlalchemy/dialects/mysql/cymysql.py,sha256=zaVxpSLTg8rvIrI6BtlK0815BCLuLKp2ILHLs57thVA,2271 +sqlalchemy/dialects/mysql/dml.py,sha256=6S2VylKWm2wCp-GLiwHkFGlkN6HUO6SU6adS61JvuUc,6208 +sqlalchemy/dialects/mysql/enumerated.py,sha256=Dv5BAF8DxCqfVXIkXt5kzGG-BxNygpdnXrZjyyzKyqM,9364 +sqlalchemy/dialects/mysql/expression.py,sha256=HJ4IO3LPJk4cQYIL-O-jN2vLWxVGCqem_K3h8kKNWzE,3741 +sqlalchemy/dialects/mysql/json.py,sha256=DMQnyo3PQ_XSPvDl8jt26Ya-fyMEaIJDXQBdLVmsdjE,2313 +sqlalchemy/dialects/mysql/mariadb.py,sha256=OBwN9RMQLP-xqLbNMAe5uoz7PEtqa68ln2HwwA6KUn8,585 +sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=vLhoFmC9OFh30bHGRFBwWHv3ou3wTZ8WPZOamgmUuWs,7563 +sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=CT4bFb2WaFHwBDfRSqK3ieltrkulTYwsX0kgbWPrRao,7690 +sqlalchemy/dialects/mysql/mysqldb.py,sha256=qvea9Iuf6SUqb4QSHeCEcbUf3c3FSckjT4jfQSTMlyw,10437 +sqlalchemy/dialects/mysql/oursql.py,sha256=fWWMyvhNZ6ywBGvvwJ8DqtBec8cUtziiIjYopBn2WVg,8523 +sqlalchemy/dialects/mysql/provision.py,sha256=P5ma4Xy5eSOFIcMjIe_zAwu_6ncSXSLVZYYSMS5Io9c,2649 +sqlalchemy/dialects/mysql/pymysql.py,sha256=D106c8jEME1O0wOMV7ZgSuwin7Pv61kKLWYFEEKPpUY,2770 +sqlalchemy/dialects/mysql/pyodbc.py,sha256=zP-eyXHbRSTn8R8AvptxpbjS-5riRrhnWYIIxNRk4QA,4048 +sqlalchemy/dialects/mysql/reflection.py,sha256=TcX8NovMj9BbEkGUALcom4JMAMPc_kL6nmQzSy5XXGs,18553 +sqlalchemy/dialects/mysql/reserved_words.py,sha256=vvAyUvobiAB46Lpd7DhyWPgp3cWdFaVu9_5P39TEXMM,9104 +sqlalchemy/dialects/mysql/types.py,sha256=MrMLGeFo-zJJfGMn39smAfxy5fPvQrgXv49cIrm6Img,24589 +sqlalchemy/dialects/oracle/__init__.py,sha256=POVn6bB3yD-b4ZT7CSYQlmNpxDRIRpfuJ8CTTYgphPM,1229 +sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/dialects/oracle/__pycache__/base.cpython-310.pyc,, +sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-310.pyc,, +sqlalchemy/dialects/oracle/__pycache__/provision.cpython-310.pyc,, +sqlalchemy/dialects/oracle/base.py,sha256=YDCHmDwRkiwEJNG8GPyUXBKPj6CWkqdj4AIhtc5goBc,86866 +sqlalchemy/dialects/oracle/cx_oracle.py,sha256=vXRRPqDPbq8b9bbIVi0l3TKXnf850sD9jv1hTu2T25M,53014 +sqlalchemy/dialects/oracle/provision.py,sha256=GtHrw1rtW0bzPSa9dUE-IjDFGaElyJyw4rwHAK3QDVY,5806 +sqlalchemy/dialects/postgresql/__init__.py,sha256=thvDDu6Vp68lXdF78wagnnOTq7sFBCDwT5X9x8Mygn8,2509 +sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/array.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/base.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/json.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pygresql.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-310.pyc,, +sqlalchemy/dialects/postgresql/array.py,sha256=I-4mTmrRsJSr42EpoUy4OvMBys82PrDO3oMK8FusPbg,13131 +sqlalchemy/dialects/postgresql/asyncpg.py,sha256=FFVn3cctgxfTvRtVV1XUlzmXfIBFgLsqjDnNjj9K9GA,35265 
+sqlalchemy/dialects/postgresql/base.py,sha256=T4BWWriX-tSIWZBYH5cBjAn7xHY9xZfRSHDOvJw1P44,158165 +sqlalchemy/dialects/postgresql/dml.py,sha256=O7GBPR4liaOBBJWGlEU86vrfuzLMy3d3LIbeRZ-nSvc,9582 +sqlalchemy/dialects/postgresql/ext.py,sha256=Aap5338DyuYWDFu8F9ydrwuIXZhfMFNydu6OEbQcVqU,8435 +sqlalchemy/dialects/postgresql/hstore.py,sha256=UEjWkExqERMXkK-62Drv8ppJOyV64827xHxi3QpKV-I,12696 +sqlalchemy/dialects/postgresql/json.py,sha256=cIABYehcW9j7ctBCAYXhZFGFQeHgLkisVQB1k2ftnT4,10556 +sqlalchemy/dialects/postgresql/pg8000.py,sha256=_UztntjUclGLtty8nvVwlcNtCEFz_9lsQrf-HR7EpLE,17044 +sqlalchemy/dialects/postgresql/provision.py,sha256=ZDFEIOvtpBIgCnj1Q1R3-WDWx7lFnE6kdEGNTDFpzAw,4319 +sqlalchemy/dialects/postgresql/psycopg2.py,sha256=2u2Tup0P_ZO_X8sGmI7lNndUTXSEl_Mm6HvznB_dh1A,38457 +sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=pBRHxI6KgVePwPO_FAFaE7Nces43qPIviDwbtchi8f8,1691 +sqlalchemy/dialects/postgresql/pygresql.py,sha256=oZ847ZkhqqzPeo1BiQnIP7slX7SIbXdoo1OyC5ehChY,8585 +sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=_Kw2eXUEAefflJVA1dZJ7aCGt2Lown3PW3i2ab2Eat0,3693 +sqlalchemy/dialects/postgresql/ranges.py,sha256=p8jBm8A1P7mHlc9QrnIvJwmxm3GKDe2xr1RgY8dRg-Q,4761 +sqlalchemy/dialects/sqlite/__init__.py,sha256=GwL23FcaoQOso1Sa1RlaF3i5SezqEVjfijvbp8hzRg0,1198 +sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-310.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/base.cpython-310.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-310.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/json.cpython-310.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-310.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-310.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-310.pyc,, +sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=DmB9DkY-9CMB8LzRxE7g6WgcifTptm4fI0Vwwy8jwHw,9995 +sqlalchemy/dialects/sqlite/base.py,sha256=sTyIeurt2vMWt7Zjcnj27ZtWkfjA3CUoE9sbYKgknbU,88436 +sqlalchemy/dialects/sqlite/dml.py,sha256=hFloxZoqsrew4tlzS0DSMyzdKJ9-HU0z-dLKWVgR5ns,6865 +sqlalchemy/dialects/sqlite/json.py,sha256=oFw4Rt8xw-tkD3IMlm3TDEGe1RqrTyvIuqjABsxn8EI,2518 +sqlalchemy/dialects/sqlite/provision.py,sha256=AQILXN5PBUSM05c-SFSFFhPdFqcQDwdoKtUnvLDac14,4676 +sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=1MmhAlAaUTnzm7guppjDzGXQ6_OxFtuGzchSiJ0PeRA,5605 +sqlalchemy/dialects/sqlite/pysqlite.py,sha256=_hIHqR-373bMLUr4fDaN3UXHtJzW-fbLatvXZNx2hWg,23441 +sqlalchemy/dialects/sybase/__init__.py,sha256=STn2xh97yskErTEYZAyrptb5vYOqPamvb9-QnYd3aG4,1364 +sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/dialects/sybase/__pycache__/base.cpython-310.pyc,, +sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-310.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-310.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-310.pyc,, +sqlalchemy/dialects/sybase/base.py,sha256=rOfZ2sN3BEtwIDo9nvIWe5VpgxVvjjLt4gSxFb9VyC0,32421 +sqlalchemy/dialects/sybase/mxodbc.py,sha256=7U4-Y4mf_o6qzFQraQ7XklDTB0PDddF8u6hFIpuAsCE,939 +sqlalchemy/dialects/sybase/pyodbc.py,sha256=bTbAjgvx2LRlhY94DYl_NXRkbVJAd71_LbIvRCtDPX0,2230 +sqlalchemy/dialects/sybase/pysybase.py,sha256=-i6vGx7UIVX2arQE9_9GM_YcqeiRCawqxcXnngjvRAY,3370 +sqlalchemy/engine/__init__.py,sha256=F6KiBbO1ErswjwDKhkswbuCic77W5NnJmHQdP_uI_Cw,2075 +sqlalchemy/engine/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/base.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/characteristics.cpython-310.pyc,, 
+sqlalchemy/engine/__pycache__/create.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/cursor.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/default.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/events.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/interfaces.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/mock.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/reflection.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/result.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/row.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/strategies.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/url.cpython-310.pyc,, +sqlalchemy/engine/__pycache__/util.cpython-310.pyc,, +sqlalchemy/engine/base.py,sha256=KBDEZWqT7DnFnWclj4u6Ns4UcEe8f2j0OKbfMsasBNY,120627 +sqlalchemy/engine/characteristics.py,sha256=qvd3T8HW470kIxN-x6OzycfjCFdnmbzcaFQeds7KHOw,1817 +sqlalchemy/engine/create.py,sha256=oz20tPBRt3gQCFLLDq7yFJFxmSd8wWzHVjVvSWW1oWc,31286 +sqlalchemy/engine/cursor.py,sha256=2d0UrU6mMg-k7YUh-WybDXkI83JDzvXyzhMX7ywqnAA,68111 +sqlalchemy/engine/default.py,sha256=DG0mzOhEOe9pluYqeGvvRRNOt8VnwzhGGbiKA0mlJpY,65467 +sqlalchemy/engine/events.py,sha256=_qeDo_mMNXXnpQBSAnaRkE1gg6c-r7P5IT78r0aBUuc,33422 +sqlalchemy/engine/interfaces.py,sha256=Os_4HO7Ebo1Hxl8Eym86oqB3h6E7K8T5LQrOTYtbSpY,58421 +sqlalchemy/engine/mock.py,sha256=wJIFZbvkHwAoi7jCupeyZzuE-J9lqyzhJ6VdrAyMNkw,3626 +sqlalchemy/engine/reflection.py,sha256=eGlFTisiNrDjBMlJiia81L1zyKuntuQZ48zl1-xcuJ4,38703 +sqlalchemy/engine/result.py,sha256=4KJmQpTds3svSXIN2hYw-1XLhgOf4KghPz3v16H9jT8,56554 +sqlalchemy/engine/row.py,sha256=m9J7J1NI3jdOHIS5UHGTbqz2bC75yY5q2F996v6oQ_k,18685 +sqlalchemy/engine/strategies.py,sha256=RzejZkLGzWq6QWWJ6a6fyYDdQac4VWCmORCTYEOZwCM,414 +sqlalchemy/engine/url.py,sha256=MiqNxbjPerR5hTjqNd1fsj5o2ht5YnKtUvbDHcMUAGs,26769 +sqlalchemy/engine/util.py,sha256=drzyg95MX5NzC10bSQsqQ-dc3k4N4p009JhQuLUS8r0,8442 +sqlalchemy/event/__init__.py,sha256=I3Y3cjTy0wC_f-pJRX7B-9UizYje3nh3lIHOlL0Xf00,517 +sqlalchemy/event/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/event/__pycache__/api.cpython-310.pyc,, +sqlalchemy/event/__pycache__/attr.cpython-310.pyc,, +sqlalchemy/event/__pycache__/base.cpython-310.pyc,, +sqlalchemy/event/__pycache__/legacy.cpython-310.pyc,, +sqlalchemy/event/__pycache__/registry.cpython-310.pyc,, +sqlalchemy/event/api.py,sha256=yTMDO4cZp-CioTgeDfYGR0O4_zxfFZ-EFdNqM-dOw8E,8043 +sqlalchemy/event/attr.py,sha256=jWU2m7uuuq40HfwIlQK27eJ_3Gg92dtqI4kwu8vhmuk,14625 +sqlalchemy/event/base.py,sha256=FCifBVGLxkNkpr4mN608ZRcAraML8bcS5IU8_vAJjRQ,10936 +sqlalchemy/event/legacy.py,sha256=C09AtrcACXF2gL5c8adk2nLUo1oBfnhFHDkBpv3znUg,6270 +sqlalchemy/event/registry.py,sha256=5FuO494J1n2dUYImM9Yz1kl7C8NmO4c4GtKbk_l-S6k,8486 +sqlalchemy/events.py,sha256=VrZuUXHgwyx4kMKEielctzyTWqDlm2gvzMcc38jedoE,467 +sqlalchemy/exc.py,sha256=x9Z-nIkMQ1r3dqdNmVK5cHQq0zVFrdI6oKkXMw_QB3s,21116 +sqlalchemy/ext/__init__.py,sha256=4-X49d1TiOPC-T8JSpaFiMMVNP8JL9bDoBW19wBmXRY,322 +sqlalchemy/ext/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/ext/__pycache__/associationproxy.cpython-310.pyc,, +sqlalchemy/ext/__pycache__/automap.cpython-310.pyc,, +sqlalchemy/ext/__pycache__/baked.cpython-310.pyc,, +sqlalchemy/ext/__pycache__/compiler.cpython-310.pyc,, +sqlalchemy/ext/__pycache__/horizontal_shard.cpython-310.pyc,, +sqlalchemy/ext/__pycache__/hybrid.cpython-310.pyc,, +sqlalchemy/ext/__pycache__/indexable.cpython-310.pyc,, +sqlalchemy/ext/__pycache__/instrumentation.cpython-310.pyc,, +sqlalchemy/ext/__pycache__/mutable.cpython-310.pyc,, 
+sqlalchemy/ext/__pycache__/orderinglist.cpython-310.pyc,, +sqlalchemy/ext/__pycache__/serializer.cpython-310.pyc,, +sqlalchemy/ext/associationproxy.py,sha256=-687A1ZZMgToO6emMUy8kDOQb-GE8OqfM01xNkh3QtQ,51139 +sqlalchemy/ext/asyncio/__init__.py,sha256=XKCzBrSBP_LlqaCKpiMeSPUzwNdQFXUg9GL57EOM9-8,823 +sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/ext/asyncio/__pycache__/base.cpython-310.pyc,, +sqlalchemy/ext/asyncio/__pycache__/engine.cpython-310.pyc,, +sqlalchemy/ext/asyncio/__pycache__/events.cpython-310.pyc,, +sqlalchemy/ext/asyncio/__pycache__/exc.cpython-310.pyc,, +sqlalchemy/ext/asyncio/__pycache__/result.cpython-310.pyc,, +sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-310.pyc,, +sqlalchemy/ext/asyncio/__pycache__/session.cpython-310.pyc,, +sqlalchemy/ext/asyncio/base.py,sha256=VQmIq-CMEVQpZPMEa0K91tMxZMqKyCCAwJVuCLiG34w,2280 +sqlalchemy/ext/asyncio/engine.py,sha256=ZFCD6NoXPlQWbUuLUln98WgpxI8-yUv_uPBi4O2pcKY,26434 +sqlalchemy/ext/asyncio/events.py,sha256=_rh2nSAD_6ZqoIRJihiCKUgzSMLBMxBuZ_gUWLpfbHg,1423 +sqlalchemy/ext/asyncio/exc.py,sha256=3tcIXQPCJROB3P_TkoHmkzy6o_dIIuMcnnu4tJB__ck,639 +sqlalchemy/ext/asyncio/result.py,sha256=Dhi1lo8lUnT7MyU1KxlBkV-PL6tL4iXCTaB3PwsXm08,21205 +sqlalchemy/ext/asyncio/scoping.py,sha256=fckFlTcwgGjgurVnp69En-4IFwWRqgUV6ukGgPklDJ4,2960 +sqlalchemy/ext/asyncio/session.py,sha256=DGB4dJPJefCesp5JP_GpITG6n_6_-kP8Rafbm3yVgT8,23920 +sqlalchemy/ext/automap.py,sha256=dbkjS2V9DXrubnQBdEXZzV86EIrLfgtdR1WBVEOs_cQ,45195 +sqlalchemy/ext/baked.py,sha256=DI4hcMk-poznDtAB6S38S7kvo5DXuvrt1CIAT8t5QPw,19969 +sqlalchemy/ext/compiler.py,sha256=Q3Dkj-viLi_1_OFL1EUKsz3RJ8aQk6bYwIflx6tbZR0,22629 +sqlalchemy/ext/declarative/__init__.py,sha256=NS6-oy4iI6AiMaGWGznzYSx4gnB1fOniOGtqPHxC0ms,1842 +sqlalchemy/ext/declarative/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/ext/declarative/__pycache__/extensions.cpython-310.pyc,, +sqlalchemy/ext/declarative/extensions.py,sha256=3NBR9EosOwVlON_NM8rcmke5rI2qoKcj7_jfG5sDN8o,16409 +sqlalchemy/ext/horizontal_shard.py,sha256=2NygP6u9SsOlOqCEqkzNbcSshdxtfxOI78XysnJw3S8,8922 +sqlalchemy/ext/hybrid.py,sha256=M7hI1N-bgmSwU3UFszs5ldVeH4a_NdX34fen_Lk0kb0,41787 +sqlalchemy/ext/indexable.py,sha256=RZmG2074pMoM9-A3evs2ZKqMn3M9uTc3izAI1cN6HQc,11255 +sqlalchemy/ext/instrumentation.py,sha256=ReSLFxqbHgwAKNwoQQmKHoqYvWCob_WuXlPAEUJk4pk,14386 +sqlalchemy/ext/mutable.py,sha256=-s1S_ZKSzklJh1qwGaSyg6TQK0tyIWq4qhKiZ6SSq3M,31997 +sqlalchemy/ext/mypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sqlalchemy/ext/mypy/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/ext/mypy/__pycache__/apply.cpython-310.pyc,, +sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-310.pyc,, +sqlalchemy/ext/mypy/__pycache__/infer.cpython-310.pyc,, +sqlalchemy/ext/mypy/__pycache__/names.cpython-310.pyc,, +sqlalchemy/ext/mypy/__pycache__/plugin.cpython-310.pyc,, +sqlalchemy/ext/mypy/__pycache__/util.cpython-310.pyc,, +sqlalchemy/ext/mypy/apply.py,sha256=9FIH7jxh6Rl1YDE_3tsacpfNb_8floNQkTuHaNgL7XU,9610 +sqlalchemy/ext/mypy/decl_class.py,sha256=buWnXWGOR71CADPZ0_51S49imTXDo-LjTjWsWhhgee0,17343 +sqlalchemy/ext/mypy/infer.py,sha256=twOkOuvJ9SuXd2-Gj33xfqoD4NKgvCa9MUIZ_ozzq6c,17816 +sqlalchemy/ext/mypy/names.py,sha256=exMWKhQ7ouSFXojttr0ZadmigT5O_wFQ1rmZ4r7Ks4g,7930 +sqlalchemy/ext/mypy/plugin.py,sha256=6JnnsFCOJVwkF1o6FmXRhBYszq5gmli_lqLZJKMhALA,9245 +sqlalchemy/ext/mypy/util.py,sha256=MiJ_HqiNQLhC9BLwqGTK5VaVfNvkp5VupxhsdDRkCRQ,8233 +sqlalchemy/ext/orderinglist.py,sha256=JtRiLDROBsDJnME4kZMDzr3FI6rheP-bd1M-C6zxDPU,13875 
+sqlalchemy/ext/serializer.py,sha256=RC0aOS6nlFdA0Agkw_-3iiw7Ah2bZnY7sZVZFGj7vHI,5956 +sqlalchemy/future/__init__.py,sha256=tDG3ddqc3cRE61x7Q32ekTBQONsdy30drnW6KnIB92g,525 +sqlalchemy/future/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/future/__pycache__/engine.cpython-310.pyc,, +sqlalchemy/future/engine.py,sha256=Ly-M3NGamVrpnA9XOG_nVLra5f7OlmTMmg7dMb2tn4s,16184 +sqlalchemy/future/orm/__init__.py,sha256=EKGpGVxFh3-ZA34c1Ujfy51Z_2oG05CFiSxk48pE1R8,289 +sqlalchemy/future/orm/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/inspection.py,sha256=Bcoh4cUJMKjZHcGQP-_Nz-swGXLVVWidj36W2F35Trg,3051 +sqlalchemy/log.py,sha256=P5bDWAmejLDPTW839tkH_gYoZG5jHqP1r5C-trWqJds,6886 +sqlalchemy/orm/__init__.py,sha256=6zq5DLpaOlFYlB45o7S48i26ry2OTe5fcXveLSP6XF8,10714 +sqlalchemy/orm/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/attributes.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/base.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/clsregistry.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/collections.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/context.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/decl_api.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/decl_base.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/dependency.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/descriptor_props.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/dynamic.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/evaluator.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/events.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/exc.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/identity.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/instrumentation.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/interfaces.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/loading.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/mapper.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/path_registry.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/persistence.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/properties.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/query.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/relationships.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/scoping.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/session.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/state.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/strategies.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/strategy_options.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/sync.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/unitofwork.cpython-310.pyc,, +sqlalchemy/orm/__pycache__/util.cpython-310.pyc,, +sqlalchemy/orm/attributes.py,sha256=F-rt-o79OHVjk4lSboDS-ujKXnhovbImEK2I_sGE_Zk,77094 +sqlalchemy/orm/base.py,sha256=HZu51CAOyCjJqGGPJbFqOgqbbA_yQ06Lucxpf-J1B54,15068 +sqlalchemy/orm/clsregistry.py,sha256=i8-S8jCSsslTUlOXmfaxoDDkxy3nYGUiZVUeJlpDERA,13286 +sqlalchemy/orm/collections.py,sha256=YXLS4MyQIGWVAV5S3sXLvJKdfVCFAQsKFymOgxzkSuU,54723 +sqlalchemy/orm/context.py,sha256=LqN_JlQsSx8AhhrEi-HyrU1qkJlsO_nv6hBoUjHw--Y,109787 +sqlalchemy/orm/decl_api.py,sha256=rZSz1jys3n_V2woNUZuV8nciN0VgDZFMAiQdNbLkr10,35564 +sqlalchemy/orm/decl_base.py,sha256=wnDP0akLnK6dn5omleEQjB4E7PFv4LSRlI1qgcB8AAM,43487 +sqlalchemy/orm/dependency.py,sha256=RsQ6UtF0Ryl-hgMqw9mm5tqNCZa5bbW56_X1prm6R-8,46987 +sqlalchemy/orm/descriptor_props.py,sha256=mdVGbdKc4N8gCxV2RXDGMFZB3V2aWZARUUH9VOe0K1s,25987 +sqlalchemy/orm/dynamic.py,sha256=heJsZBQSckDO1k2fYd1x1tap6qEDoS2yogx9VapzIY4,15957 +sqlalchemy/orm/evaluator.py,sha256=Cc4vdrYRq8acVhpVmGenA-xn_GRodgkhuplLlqUfrdo,6852 
+sqlalchemy/orm/events.py,sha256=ex5i3V2oFZ2SPHsz5jY_WqtGnCK2rplfu9xmUD8CZoU,111443 +sqlalchemy/orm/exc.py,sha256=dCW9lmc-DpwTJaHo-q8TJac5dK2jWFc4Fes6V8Z_gUo,6532 +sqlalchemy/orm/identity.py,sha256=_UnI-6Beolu3xWGGERUQfVg0dc9sb-3G22Xv8yzfKFg,7233 +sqlalchemy/orm/instrumentation.py,sha256=L4pmTKaGvmRjd8UTThGF-QqScFIWWs9hx6AZA0-rOn0,20378 +sqlalchemy/orm/interfaces.py,sha256=kAu29kzWsyA9j44tkT-Iqia_jqmj2WZpTgTGuJ90_9g,30249 +sqlalchemy/orm/loading.py,sha256=5rAng8kIp5UOLtZd5nUjduDLIhUQ80Sodc9W-jSMc1E,49317 +sqlalchemy/orm/mapper.py,sha256=kxqUj_PStzesUDIwQ-CMLbV3E_ew9PZ-IUGmIF52hWo,139164 +sqlalchemy/orm/path_registry.py,sha256=0Akeeayg-OM-pPOAxVCyggGINInYX8kXrQkYWOtesd0,16411 +sqlalchemy/orm/persistence.py,sha256=KW7iYNJpEHjUMVFr_pQkkyvoSC1cfSpmzRvVv1H_sgs,84250 +sqlalchemy/orm/properties.py,sha256=toLCQ3PDa5DFjdrjYUfknGK56BvXnDNmU1eDoWHqNFQ,14912 +sqlalchemy/orm/query.py,sha256=a8HV58j8IstTkkqyB_AaI1ZG7yYglH23SdKcFQ-xOVA,125481 +sqlalchemy/orm/relationships.py,sha256=vB36ThKIw995MsbMMZeRvxK3JvuF0nMRNVoRw3IhNDE,142687 +sqlalchemy/orm/scoping.py,sha256=K4sY8l969uQigmm9VV1GL4XmIA505r_x_1yeDZSRWMQ,7257 +sqlalchemy/orm/session.py,sha256=OQ5mZ7FjiJuqDkEKguImAOZ_5OdJ9kyPJT5n-GksmG4,159962 +sqlalchemy/orm/state.py,sha256=M8JBLFJQGTRdxs6K6KL8JU-J01qI1jD4JlLHsWd8MP4,33409 +sqlalchemy/orm/strategies.py,sha256=PXl1qSS4ABH19vOz0wVmnlzqfP02XaggC2da1G09tOs,108207 +sqlalchemy/orm/strategy_options.py,sha256=0S100xkX9Lwor6TeRjZ1aNJgEexeCrlKTq6xME85RcQ,67319 +sqlalchemy/orm/sync.py,sha256=KRyKql_Pgjm_y8clsUOLe8jo5JzM1t6II2vCorbtRow,5824 +sqlalchemy/orm/unitofwork.py,sha256=XEMx8PhX-KdP9tQpVgB0mcqnPlVbpSPG4bSKW6zIMRE,27090 +sqlalchemy/orm/util.py,sha256=x3FzFUgfm6LpTEmznttkY9cVBfZwlRbLHwRBQD4q3UQ,73870 +sqlalchemy/pool/__init__.py,sha256=dTuz0I0lQ1aj_BHoMzoBk4FW1rI-4ssLZfXi7826ja8,1603 +sqlalchemy/pool/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/pool/__pycache__/base.cpython-310.pyc,, +sqlalchemy/pool/__pycache__/dbapi_proxy.cpython-310.pyc,, +sqlalchemy/pool/__pycache__/events.cpython-310.pyc,, +sqlalchemy/pool/__pycache__/impl.cpython-310.pyc,, +sqlalchemy/pool/base.py,sha256=qffJ_mAqPfxcERTWZkKc0sLHVl-BFsFEVY7R7BmKtpI,38552 +sqlalchemy/pool/dbapi_proxy.py,sha256=ZDa32bJzGunYw8OyS5g0GfLoRo-Qwrf7jcsGsA9StSg,4229 +sqlalchemy/pool/events.py,sha256=nVQfjW55gD6-DEtTIDUCx-cNHZCKtt7C3gsdqf-PFWg,10299 +sqlalchemy/pool/impl.py,sha256=m8kUBUGN3ZikSndBO8mcu2ym8kd_o8vEtLsDSycZXAI,15783 +sqlalchemy/processors.py,sha256=LWwr9g-qDHiike9UKqD1yX8ghCxjpAWRdQk7Mh5NepA,5745 +sqlalchemy/schema.py,sha256=FLG1OeHCucohyiShM_jvw4OJivdrWSAsI7MxPIX7Q1M,2413 +sqlalchemy/sql/__init__.py,sha256=ojeq7QnyQrUcO1Ia7nogzumgOfTKXk6Oib7HuH_hz6Y,4661 +sqlalchemy/sql/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/annotation.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/base.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/coercions.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/compiler.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/crud.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/ddl.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/default_comparator.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/dml.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/elements.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/events.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/expression.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/functions.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/lambdas.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/naming.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/operators.cpython-310.pyc,, 
+sqlalchemy/sql/__pycache__/roles.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/schema.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/selectable.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/sqltypes.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/traversals.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/type_api.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/util.cpython-310.pyc,, +sqlalchemy/sql/__pycache__/visitors.cpython-310.pyc,, +sqlalchemy/sql/annotation.py,sha256=xGpbeieggvywgRlqerZxz6lYnuSob7C86rJQ87k6Va0,11502 +sqlalchemy/sql/base.py,sha256=7Q-BZaOxQ9Z_arTEmQjUEFhbGjhW9lOquEeXTMukx-s,55839 +sqlalchemy/sql/coercions.py,sha256=-zE2wSCD0hqHI3OsJ1TZjF95EAVTNkDL0A-9Gsk3EU8,34429 +sqlalchemy/sql/compiler.py,sha256=mp8zJ5fcuIW3b8cqyAuyCCE_VnUmXNq6sp3sz397gOU,185894 +sqlalchemy/sql/crud.py,sha256=1N3uPbMDcD-p2WuPYF7qo3VAj2BR_x9aHmRG9B0rf_A,35420 +sqlalchemy/sql/ddl.py,sha256=OV8dpPN3tW0nepwxitfz05W804mGJX6I3HHNJsI0mDo,44208 +sqlalchemy/sql/default_comparator.py,sha256=MAYPDiIk0DssbvDV9JWbiWp_2GlUc2WyQ1TMG0T1PrA,11046 +sqlalchemy/sql/dml.py,sha256=5V33vxJb5nZERJt3LrfiqgBwjZ2tLtEDlyrcZuzq4w8,54983 +sqlalchemy/sql/elements.py,sha256=L9vhvx1yeJteTl9dFr-TfDfS87bQnXH7qC61XGrmzNg,180956 +sqlalchemy/sql/events.py,sha256=tusqYeUwf421_pG-T39wIHz7RtzixftIPhJG6CP_6Io,13369 +sqlalchemy/sql/expression.py,sha256=cyzp-pgHBfrgQ6_mRxo4T4zNSKIIzd40PlRLgwXI5aM,8828 +sqlalchemy/sql/functions.py,sha256=LNFpzqQ7j60n381_XwoYTU0vKre5eR8cfAOSzTWWcno,48461 +sqlalchemy/sql/lambdas.py,sha256=1A5EmfJp7vAOEQvyl_iULu79lvAGRwuHlPaib2uWz4c,44408 +sqlalchemy/sql/naming.py,sha256=bmjEtvUW0Ccrc5tzH0_PcoPeA5jAtDLPJ4QxtKaAwe8,6786 +sqlalchemy/sql/operators.py,sha256=YL5nfGJywmPdPKYdbpUN4b_sJVWVmuMgNx-0kmYDszY,47433 +sqlalchemy/sql/roles.py,sha256=ZTgs4PY4rneDh2suTVbmn25yGZyW34sztNWX8cOUf3M,5638 +sqlalchemy/sql/schema.py,sha256=9oyVpVlRMni6MimGQCIMI6H_2sWbmgpXjsFgkuTicZo,194521 +sqlalchemy/sql/selectable.py,sha256=9EuVFuWOGvIVsru3mP3HI8z-lsqrRXeLU3fQWdXf3tQ,237606 +sqlalchemy/sql/sqltypes.py,sha256=XKRVHg8jpEdT_59tledHVJ9MrHtLICSuLIolqAuwpvo,113421 +sqlalchemy/sql/traversals.py,sha256=P0GP8F8RlM-lpL5jm3gWj7-NnE8klIXEcDmHk5Dmc-c,52719 +sqlalchemy/sql/type_api.py,sha256=eRwKZtnJXNZ00v1Nz20hOLEf0g0EKXXJ_8223gTLtzw,70268 +sqlalchemy/sql/util.py,sha256=vdkdts9R06zwEv7x0DaC7kNryW2JCMoTaVew8bo7bSk,35856 +sqlalchemy/sql/visitors.py,sha256=XLRAf08NKf5ndsNDIRY3wPJaaEBIIxl3DDI_dTKrh_s,27329 +sqlalchemy/testing/__init__.py,sha256=TKwXQsqFFV4gjeO48VGaLhCE99qhIVSQNxFrKdP6uNk,2850 +sqlalchemy/testing/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/assertions.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/assertsql.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/asyncio.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/config.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/engines.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/entities.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/exclusions.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/fixtures.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/mock.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/pickleable.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/profiling.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/provision.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/requirements.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/schema.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/util.cpython-310.pyc,, +sqlalchemy/testing/__pycache__/warnings.cpython-310.pyc,, 
+sqlalchemy/testing/assertions.py,sha256=fcCcIUk04m2XgpotqK2mRD5nKXsyOHXV8tchAAnfQyk,26502 +sqlalchemy/testing/assertsql.py,sha256=OIt0QyHKlFJ4zxu6WrX8_ufmBD9KrMgFrjsXTGkU3ys,14964 +sqlalchemy/testing/asyncio.py,sha256=B6ZqYcQpT6QtM8gR3o3AcZX32J6ZbWDqTTZGklVo5-I,3671 +sqlalchemy/testing/config.py,sha256=XhmzFNkEN_djORr4r6owvoIl3G5zA6Eo5neUiEJXy0E,6543 +sqlalchemy/testing/engines.py,sha256=s4h7bKB2Bqmu1rlquR2O88UktP03n6UVrrWkTNhqm3w,13392 +sqlalchemy/testing/entities.py,sha256=sOd9BlmZFPQFrBdCUlkOR8lxGEQNExkJmS_V2U5WIOk,3253 +sqlalchemy/testing/exclusions.py,sha256=zOthfVJs07z9wN2iAH0rGT39Q76Y_2cBuk5dPEW4wOA,13329 +sqlalchemy/testing/fixtures.py,sha256=h8N_u43aqyXJEPsYoGdwBNziEGwW6Y_HSA7D9-Dz1ys,25828 +sqlalchemy/testing/mock.py,sha256=RUTHkpnxCQfsDlEZ_aQttL_3SXLATwxt4olgmSxAsJw,894 +sqlalchemy/testing/pickleable.py,sha256=ATT345KycDbqLMgp-x7P_DaPUX1z_g6gC_BukyYhqBQ,2707 +sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sqlalchemy/testing/plugin/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-310.pyc,, +sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-310.pyc,, +sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-310.pyc,, +sqlalchemy/testing/plugin/__pycache__/reinvent_fixtures_py2k.cpython-310.pyc,, +sqlalchemy/testing/plugin/bootstrap.py,sha256=038KOv89msOTFsWoDvCyPRb3ZTMv5eAOOKoGPHuZ7zs,1701 +sqlalchemy/testing/plugin/plugin_base.py,sha256=9Bg56KOsZSGW1jLHh_7fle85yFocyV8AGGVlswO9XAU,21540 +sqlalchemy/testing/plugin/pytestplugin.py,sha256=_NbB52E6sv6R9NJApMxMnwomH8y7iirfCYKnXvUH1g0,26133 +sqlalchemy/testing/plugin/reinvent_fixtures_py2k.py,sha256=MdakbJzFh8N_7gUpX-nFbGPFs3AZRsmDAe-7zucf0ls,3288 +sqlalchemy/testing/profiling.py,sha256=ullStV2c-R4jTQJMK1tMKZE5qtSZ-PB1LzHod_hA230,10566 +sqlalchemy/testing/provision.py,sha256=YUEX9eiHBnQYpTHKBWM9IBMoVRFIgm6sjcZIqOeyKIc,12047 +sqlalchemy/testing/requirements.py,sha256=Bbm-XJ0Yg3EiV0ko5fwJSaINipUEtB7X72eBxjdjefU,43433 +sqlalchemy/testing/schema.py,sha256=INOq15yhNyANmheylSQBUlm0IWRaAkEX22BpHSMqn08,6544 +sqlalchemy/testing/suite/__init__.py,sha256=_firVc2uS3TMZ3vH2baQzNb17ubM78RHtb9kniSybmk,476 +sqlalchemy/testing/suite/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_cte.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_insert.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_results.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_select.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_types.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-310.pyc,, +sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-310.pyc,, +sqlalchemy/testing/suite/test_cte.py,sha256=XuTuaWblSXyO1OOUTShBBmNch7fBdGnlMD84ooVTqFY,6183 +sqlalchemy/testing/suite/test_ddl.py,sha256=UwbfljXHdWUen3muIcgnOPi-A4AO6F1QzSOiHf9lU-A,11762 +sqlalchemy/testing/suite/test_deprecations.py,sha256=8oLDFUswey8KjPFKRUsqMyGT5sUMMoPQr7-XyIBMehw,5059 +sqlalchemy/testing/suite/test_dialect.py,sha256=eR1VVOb2fm955zavpWkmMjipCva3QvEE177U0OG-0LY,10895 
+sqlalchemy/testing/suite/test_insert.py,sha256=oKtVjFuxqdSV5uKj5-OxdSABupLp0pECkWkSLd2U_QA,11134 +sqlalchemy/testing/suite/test_reflection.py,sha256=p-m2BjuWh7jW2vXvY_LxYsfjW47HqGs9O9PUpfm1HIs,58130 +sqlalchemy/testing/suite/test_results.py,sha256=xcoSl1ueaHo8LgKZp0Z1lJ44Mhjf2hxlWs_LjNLBNiE,13983 +sqlalchemy/testing/suite/test_rowcount.py,sha256=GQQRXIWbb6SfD5hwtBC8qvkGAgi1rI5Pv3c59eoumck,4877 +sqlalchemy/testing/suite/test_select.py,sha256=is3BbULeOWOJTRCoUwPnh6Crue15FXfkXKqAkxrFeGM,55464 +sqlalchemy/testing/suite/test_sequence.py,sha256=eCyOQlynF8T0cLrIMz0PO6WuW8ktpFVYq_fQp5CQ298,8431 +sqlalchemy/testing/suite/test_types.py,sha256=airX8OuJJdft4DU8okOLecJbcUhC15urr60Yu1U8Qe4,48044 +sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=CndeAtV3DWJXxLbOoumqf4_mOOYcW_yNOrbKQ4cwFhw,6737 +sqlalchemy/testing/suite/test_update_delete.py,sha256=w9MMRqJCm7OW0Q5XaVjS6B8BGY_b_VvBeK3EWr7NKhU,1625 +sqlalchemy/testing/util.py,sha256=bvCWcESEPEO8QUTH0CcOa4Xg65EYK--V8Q_XeFcfGfE,12503 +sqlalchemy/testing/warnings.py,sha256=l9lI3heNOSbKreAhLcABpaA1e_6Ioi4l7q0mr5jY5OI,2270 +sqlalchemy/types.py,sha256=x8YDIEypMHOzWb7dzp67tW2WfDF7xtdh72HVDxm-aaY,2995 +sqlalchemy/util/__init__.py,sha256=ype9lmU0A-NkYtLU5eMMItaheKpIYjxBy8GO7Zy9pos,6347 +sqlalchemy/util/__pycache__/__init__.cpython-310.pyc,, +sqlalchemy/util/__pycache__/_collections.cpython-310.pyc,, +sqlalchemy/util/__pycache__/_compat_py3k.cpython-310.pyc,, +sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-310.pyc,, +sqlalchemy/util/__pycache__/_preloaded.cpython-310.pyc,, +sqlalchemy/util/__pycache__/compat.cpython-310.pyc,, +sqlalchemy/util/__pycache__/concurrency.cpython-310.pyc,, +sqlalchemy/util/__pycache__/deprecations.cpython-310.pyc,, +sqlalchemy/util/__pycache__/langhelpers.cpython-310.pyc,, +sqlalchemy/util/__pycache__/queue.cpython-310.pyc,, +sqlalchemy/util/__pycache__/topological.cpython-310.pyc,, +sqlalchemy/util/_collections.py,sha256=Nulmym_NZYGN4OyE9cMtIVSoTwOzk3eJpSJ20l8j-lU,29139 +sqlalchemy/util/_compat_py3k.py,sha256=KibHVHAIlQfYdl8xs3ZhJQDlWEI6EhudTbOnMc2x9e4,2195 +sqlalchemy/util/_concurrency_py3k.py,sha256=5fTahmOgokaam-u-z7Xv0DYKR7YnK4TNjQqbVRYhoKQ,6598 +sqlalchemy/util/_preloaded.py,sha256=rx7QZ4T1zDZV5lktSvQlop3O0kdbCFVMmNDp5IOhpXQ,2396 +sqlalchemy/util/compat.py,sha256=7VdCiE0Nirs1ZR_3QMrKITjL8cq6TG2uFYGOF2WuGX0,18245 +sqlalchemy/util/concurrency.py,sha256=LtozDo0PsiToyVmKzSDnu8qOMhRyGVjTNMsBiKro9d8,2278 +sqlalchemy/util/deprecations.py,sha256=kCTvlj8qreXnXCqd7tdlMeju5bXUnzxT5vQp4qJ0vZk,11729 +sqlalchemy/util/langhelpers.py,sha256=bw0P3IhdD8U3gj0oaaI_ulOJYLJcRZ7vLkP7_9cbrjI,56254 +sqlalchemy/util/queue.py,sha256=FW6DSeO_GadaW0UA2EXjrBtFPRHO-dNGEoRwqHTfkMA,9293 +sqlalchemy/util/topological.py,sha256=MV1lkI2E0JdVIJVplggVo6iO_ZEVlUHRGvMW9AsXJRA,2859 diff --git a/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/REQUESTED b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/WHEEL b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/WHEEL new file mode 100644 index 0000000..07868c1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/WHEEL @@ -0,0 +1,8 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_5_x86_64 +Tag: cp310-cp310-manylinux1_x86_64 +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git 
a/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/top_level.txt new file mode 100644 index 0000000..39fb2be --- /dev/null +++ b/venv/lib/python3.10/site-packages/SQLAlchemy-1.4.36.dist-info/top_level.txt @@ -0,0 +1 @@ +sqlalchemy diff --git a/venv/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc b/venv/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc new file mode 100644 index 0000000..b776682 Binary files /dev/null and b/venv/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc b/venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc new file mode 100644 index 0000000..36a036f Binary files /dev/null and b/venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_distutils_hack/__init__.py b/venv/lib/python3.10/site-packages/_distutils_hack/__init__.py new file mode 100644 index 0000000..f707416 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_distutils_hack/__init__.py @@ -0,0 +1,132 @@ +import sys +import os +import re +import importlib +import warnings + + +is_pypy = '__pypy__' in sys.builtin_module_names + + +warnings.filterwarnings('ignore', + r'.+ distutils\b.+ deprecated', + DeprecationWarning) + + +def warn_distutils_present(): + if 'distutils' not in sys.modules: + return + if is_pypy and sys.version_info < (3, 7): + # PyPy for 3.6 unconditionally imports distutils, so bypass the warning + # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 + return + warnings.warn( + "Distutils was imported before Setuptools, but importing Setuptools " + "also replaces the `distutils` module in `sys.modules`. This may lead " + "to undesirable behaviors or errors. To avoid these issues, avoid " + "using distutils directly, ensure that setuptools is installed in the " + "traditional way (e.g. not an editable install), and/or make sure " + "that setuptools is always imported before distutils.") + + +def clear_distutils(): + if 'distutils' not in sys.modules: + return + warnings.warn("Setuptools is replacing distutils.") + mods = [name for name in sys.modules if re.match(r'distutils\b', name)] + for name in mods: + del sys.modules[name] + + +def enabled(): + """ + Allow selection of distutils by environment variable. + """ + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib') + return which == 'local' + + +def ensure_local_distutils(): + clear_distutils() + + # With the DistutilsMetaFinder in place, + # perform an import to cause distutils to be + # loaded from setuptools._distutils. Ref #2906. + add_shim() + importlib.import_module('distutils') + remove_shim() + + # check that submodules load as expected + core = importlib.import_module('distutils.core') + assert '_distutils' in core.__file__, core.__file__ + + +def do_override(): + """ + Ensure that the local copy of distutils is preferred over stdlib. + + See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 + for more motivation. 
+ """ + if enabled(): + warn_distutils_present() + ensure_local_distutils() + + +class DistutilsMetaFinder: + def find_spec(self, fullname, path, target=None): + if path is not None: + return + + method_name = 'spec_for_{fullname}'.format(**locals()) + method = getattr(self, method_name, lambda: None) + return method() + + def spec_for_distutils(self): + import importlib.abc + import importlib.util + + class DistutilsLoader(importlib.abc.Loader): + + def create_module(self, spec): + return importlib.import_module('setuptools._distutils') + + def exec_module(self, module): + pass + + return importlib.util.spec_from_loader('distutils', DistutilsLoader()) + + def spec_for_pip(self): + """ + Ensure stdlib distutils when running under pip. + See pypa/pip#8761 for rationale. + """ + if self.pip_imported_during_build(): + return + clear_distutils() + self.spec_for_distutils = lambda: None + + @staticmethod + def pip_imported_during_build(): + """ + Detect if pip is being imported in a build script. Ref #2355. + """ + import traceback + return any( + frame.f_globals['__file__'].endswith('setup.py') + for frame, line in traceback.walk_stack(None) + ) + + +DISTUTILS_FINDER = DistutilsMetaFinder() + + +def add_shim(): + sys.meta_path.insert(0, DISTUTILS_FINDER) + + +def remove_shim(): + try: + sys.meta_path.remove(DISTUTILS_FINDER) + except ValueError: + pass diff --git a/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..f4dcb7d Binary files /dev/null and b/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc b/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc new file mode 100644 index 0000000..502ef19 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_distutils_hack/override.py b/venv/lib/python3.10/site-packages/_distutils_hack/override.py new file mode 100644 index 0000000..2cc433a --- /dev/null +++ b/venv/lib/python3.10/site-packages/_distutils_hack/override.py @@ -0,0 +1 @@ +__import__('_distutils_hack').do_override() diff --git a/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/LICENSE.txt b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/LICENSE.txt new file mode 100644 index 0000000..e497a32 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/LICENSE.txt @@ -0,0 +1,13 @@ + Copyright aio-libs contributors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. diff --git a/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/METADATA b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/METADATA new file mode 100644 index 0000000..4289464 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/METADATA @@ -0,0 +1,253 @@ +Metadata-Version: 2.1 +Name: aiohttp +Version: 3.8.4 +Summary: Async http client/server framework (asyncio) +Home-page: https://github.com/aio-libs/aiohttp +Maintainer: aiohttp team +Maintainer-email: team@aiohttp.org +License: Apache 2 +Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby +Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp +Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html +Project-URL: Docs: RTD, https://docs.aiohttp.org +Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: AsyncIO +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +Requires-Dist: attrs (>=17.3.0) +Requires-Dist: charset-normalizer (<4.0,>=2.0) +Requires-Dist: multidict (<7.0,>=4.5) +Requires-Dist: async-timeout (<5.0,>=4.0.0a3) +Requires-Dist: yarl (<2.0,>=1.0) +Requires-Dist: frozenlist (>=1.1.1) +Requires-Dist: aiosignal (>=1.1.2) +Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7" +Requires-Dist: asynctest (==0.13.0) ; python_version < "3.8" +Requires-Dist: typing-extensions (>=3.7.4) ; python_version < "3.8" +Provides-Extra: speedups +Requires-Dist: aiodns ; extra == 'speedups' +Requires-Dist: Brotli ; extra == 'speedups' +Requires-Dist: cchardet ; (python_version < "3.10") and extra == 'speedups' + +================================== +Async http client/server framework +================================== + +.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg + :height: 64px + :width: 64px + :alt: aiohttp logo + +| + +.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg + :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI + :alt: GitHub Actions status for master branch + +.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/aiohttp + :alt: codecov.io status for master branch + +.. image:: https://badge.fury.io/py/aiohttp.svg + :target: https://pypi.org/project/aiohttp + :alt: Latest PyPI package version + +.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest + :target: https://docs.aiohttp.org/ + :alt: Latest Read The Docs + +.. 
image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group + :target: https://aio-libs.discourse.group + :alt: Discourse status + +.. image:: https://badges.gitter.im/Join%20Chat.svg + :target: https://gitter.im/aio-libs/Lobby + :alt: Chat on Gitter + + +Key Features +============ + +- Supports both client and server side of HTTP protocol. +- Supports both client and server Web-Sockets out-of-the-box and avoids + Callback Hell. +- Provides Web-server with middlewares and plugable routing. + + +Getting started +=============== + +Client +------ + +To get something from the web: + +.. code-block:: python + + import aiohttp + import asyncio + + async def main(): + + async with aiohttp.ClientSession() as session: + async with session.get('http://python.org') as response: + + print("Status:", response.status) + print("Content-type:", response.headers['content-type']) + + html = await response.text() + print("Body:", html[:15], "...") + + asyncio.run(main()) + +This prints: + +.. code-block:: + + Status: 200 + Content-type: text/html; charset=utf-8 + Body: ... + +Coming from `requests `_ ? Read `why we need so many lines `_. + +Server +------ + +An example using a simple server: + +.. code-block:: python + + # examples/server_simple.py + from aiohttp import web + + async def handle(request): + name = request.match_info.get('name', "Anonymous") + text = "Hello, " + name + return web.Response(text=text) + + async def wshandle(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + + async for msg in ws: + if msg.type == web.WSMsgType.text: + await ws.send_str("Hello, {}".format(msg.data)) + elif msg.type == web.WSMsgType.binary: + await ws.send_bytes(msg.data) + elif msg.type == web.WSMsgType.close: + break + + return ws + + + app = web.Application() + app.add_routes([web.get('/', handle), + web.get('/echo', wshandle), + web.get('/{name}', handle)]) + + if __name__ == '__main__': + web.run_app(app) + + +Documentation +============= + +https://aiohttp.readthedocs.io/ + + +Demos +===== + +https://github.com/aio-libs/aiohttp-demos + + +External links +============== + +* `Third party libraries + `_ +* `Built with aiohttp + `_ +* `Powered by aiohttp + `_ + +Feel free to make a Pull Request for adding your link to these pages! + + +Communication channels +====================== + +*aio-libs discourse group*: https://aio-libs.discourse.group + +*gitter chat* https://gitter.im/aio-libs/Lobby + +We support `Stack Overflow +`_. +Please add *aiohttp* tag to your question there. + +Requirements +============ + +- Python >= 3.6 +- async-timeout_ +- attrs_ +- charset-normalizer_ +- multidict_ +- yarl_ +- frozenlist_ + +Optionally you may install the cChardet_ and aiodns_ libraries (highly +recommended for sake of speed). + +.. _charset-normalizer: https://pypi.org/project/charset-normalizer +.. _aiodns: https://pypi.python.org/pypi/aiodns +.. _attrs: https://github.com/python-attrs/attrs +.. _multidict: https://pypi.python.org/pypi/multidict +.. _frozenlist: https://pypi.org/project/frozenlist/ +.. _yarl: https://pypi.python.org/pypi/yarl +.. _async-timeout: https://pypi.python.org/pypi/async_timeout +.. _cChardet: https://pypi.python.org/pypi/cchardet + +License +======= + +``aiohttp`` is offered under the Apache 2 license. + + +Keepsafe +======== + +The aiohttp community would like to thank Keepsafe +(https://www.getkeepsafe.com) for its support in the early days of +the project. 
+ + +Source code +=========== + +The latest developer version is available in a GitHub repository: +https://github.com/aio-libs/aiohttp + +Benchmarks +========== + +If you are interested in efficiency, the AsyncIO community maintains a +list of benchmarks on the official wiki: +https://github.com/python/asyncio/wiki/Benchmarks diff --git a/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/RECORD b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/RECORD new file mode 100644 index 0000000..0bd368a --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/RECORD @@ -0,0 +1,117 @@ +aiohttp-3.8.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +aiohttp-3.8.4.dist-info/LICENSE.txt,sha256=n4DQ2311WpQdtFchcsJw7L2PCCuiFd3QlZhZQu2Uqes,588 +aiohttp-3.8.4.dist-info/METADATA,sha256=hlYJN6lSJ-2Ay9XQtsfMGgv5ACNNQMKGMLnTslTZRVM,7355 +aiohttp-3.8.4.dist-info/RECORD,, +aiohttp-3.8.4.dist-info/WHEEL,sha256=nKSwEH5fkxvG0Vdj1Hx7vbuU-SGQ9Nxl4yFFsCilvhs,152 +aiohttp-3.8.4.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8 +aiohttp/.hash/_cparser.pxd.hash,sha256=GoFy-KArtO3unhO5IuosMnc-wwcx7yofVZp2gJi_n_Y,121 +aiohttp/.hash/_find_header.pxd.hash,sha256=_mbpD6vM-CVCKq3ulUvsOAz5Wdo88wrDzfpOsMQaMNA,125 +aiohttp/.hash/_helpers.pyi.hash,sha256=Ew4BZDc2LqFwszgZZUHHrJvw5P8HBhJ700n1Ntg52hE,121 +aiohttp/.hash/_helpers.pyx.hash,sha256=5JQ6BlMBE4HnRaCGdkK9_wpL3ZSWpU1gyLYva0Wwx2c,121 +aiohttp/.hash/_http_parser.pyx.hash,sha256=pEF-JTzd1dVYEwfuzUiTtp8aekvrhhOwiFi4vELWcsM,125 +aiohttp/.hash/_http_writer.pyx.hash,sha256=3Qg3T3D-Ud73elzPHBufK0yEu9tP5jsu6g-aPKQY9gE,125 +aiohttp/.hash/_websocket.pyx.hash,sha256=M97f-Yti-4vnE4GNTD1s_DzKs-fG_ww3jle6EUvixnE,123 +aiohttp/.hash/hdrs.py.hash,sha256=KpTaDTcWfiQrW2QPA5glgIfw6o5JC1hsAYZHeFMuBnI,116 +aiohttp/__init__.py,sha256=sGszf3xlTnDTypy5NE-MWYmTIET2IudwBZgWDV8z9Y8,6870 +aiohttp/__pycache__/__init__.cpython-310.pyc,, +aiohttp/__pycache__/abc.cpython-310.pyc,, +aiohttp/__pycache__/base_protocol.cpython-310.pyc,, +aiohttp/__pycache__/client.cpython-310.pyc,, +aiohttp/__pycache__/client_exceptions.cpython-310.pyc,, +aiohttp/__pycache__/client_proto.cpython-310.pyc,, +aiohttp/__pycache__/client_reqrep.cpython-310.pyc,, +aiohttp/__pycache__/client_ws.cpython-310.pyc,, +aiohttp/__pycache__/connector.cpython-310.pyc,, +aiohttp/__pycache__/cookiejar.cpython-310.pyc,, +aiohttp/__pycache__/formdata.cpython-310.pyc,, +aiohttp/__pycache__/hdrs.cpython-310.pyc,, +aiohttp/__pycache__/helpers.cpython-310.pyc,, +aiohttp/__pycache__/http.cpython-310.pyc,, +aiohttp/__pycache__/http_exceptions.cpython-310.pyc,, +aiohttp/__pycache__/http_parser.cpython-310.pyc,, +aiohttp/__pycache__/http_websocket.cpython-310.pyc,, +aiohttp/__pycache__/http_writer.cpython-310.pyc,, +aiohttp/__pycache__/locks.cpython-310.pyc,, +aiohttp/__pycache__/log.cpython-310.pyc,, +aiohttp/__pycache__/multipart.cpython-310.pyc,, +aiohttp/__pycache__/payload.cpython-310.pyc,, +aiohttp/__pycache__/payload_streamer.cpython-310.pyc,, +aiohttp/__pycache__/pytest_plugin.cpython-310.pyc,, +aiohttp/__pycache__/resolver.cpython-310.pyc,, +aiohttp/__pycache__/streams.cpython-310.pyc,, +aiohttp/__pycache__/tcp_helpers.cpython-310.pyc,, +aiohttp/__pycache__/test_utils.cpython-310.pyc,, +aiohttp/__pycache__/tracing.cpython-310.pyc,, +aiohttp/__pycache__/typedefs.cpython-310.pyc,, +aiohttp/__pycache__/web.cpython-310.pyc,, +aiohttp/__pycache__/web_app.cpython-310.pyc,, +aiohttp/__pycache__/web_exceptions.cpython-310.pyc,, 
+aiohttp/__pycache__/web_fileresponse.cpython-310.pyc,, +aiohttp/__pycache__/web_log.cpython-310.pyc,, +aiohttp/__pycache__/web_middlewares.cpython-310.pyc,, +aiohttp/__pycache__/web_protocol.cpython-310.pyc,, +aiohttp/__pycache__/web_request.cpython-310.pyc,, +aiohttp/__pycache__/web_response.cpython-310.pyc,, +aiohttp/__pycache__/web_routedef.cpython-310.pyc,, +aiohttp/__pycache__/web_runner.cpython-310.pyc,, +aiohttp/__pycache__/web_server.cpython-310.pyc,, +aiohttp/__pycache__/web_urldispatcher.cpython-310.pyc,, +aiohttp/__pycache__/web_ws.cpython-310.pyc,, +aiohttp/__pycache__/worker.cpython-310.pyc,, +aiohttp/_cparser.pxd,sha256=5tE01W1fUWqytcOyldDUQKO--RH0OE1QYgQBiJWh-DM,4998 +aiohttp/_find_header.pxd,sha256=0GfwFCPN2zxEKTO1_MA5sYq2UfzsG8kcV3aTqvwlz3g,68 +aiohttp/_headers.pxi,sha256=n701k28dVPjwRnx5j6LpJhLTfj7dqu2vJt7f0O60Oyg,2007 +aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so,sha256=TyQOeZl_w-N9hPpEfVVUmWmrWfoCWulVVKOCOsHe_m0,332504 +aiohttp/_helpers.pyi,sha256=ZoKiJSS51PxELhI2cmIr5737YjjZcJt7FbIRO3ym1Ss,202 +aiohttp/_helpers.pyx,sha256=XeLbNft5X_4ifi8QB8i6TyrRuayijMSO3IDHeSA89uM,1049 +aiohttp/_http_parser.cpython-310-x86_64-linux-gnu.so,sha256=Pe8F0USaQ7FQ0CqNv9wirSJ8hj_U9PPcC9X8BgLZs-Y,2254880 +aiohttp/_http_parser.pyx,sha256=1u38_ESw5VgSqajx1mnGdO6Hqk0ov9PxeFreHq4ktoM,27336 +aiohttp/_http_writer.cpython-310-x86_64-linux-gnu.so,sha256=HmkvRZLOtrvHq5bgUW0j4nkQDG0Eo3_NyC-C8V2XDiI,308976 +aiohttp/_http_writer.pyx,sha256=aIHAp8g4ZV5kbGRdmZce-vXjELw2M6fGKyJuOdgYQqw,4575 +aiohttp/_websocket.cpython-310-x86_64-linux-gnu.so,sha256=WhUr4j8sMsYfj3HeADQT_XQccjbeQ8ytGlUmgwQ6F6M,111976 +aiohttp/_websocket.pyx,sha256=1XuOSNDCbyDrzF5uMA2isqausSs8l2jWTLDlNDLM9Io,1561 +aiohttp/abc.py,sha256=0FhHtbb3W7wRNtJISACN1teP8LZ49553v5Xoh5zNAFQ,5505 +aiohttp/base_protocol.py,sha256=5JUyuIGwKf7sFhf0YLAnk36_hkSIxBnP4hN09RdMGGk,2741 +aiohttp/client.py,sha256=5dTKnaqzZvbEjd4M6yBOhDDR1uErDKu_xI3xGlzzYjs,45037 +aiohttp/client_exceptions.py,sha256=tiaUIb2xy3s-O-KTPVL6L_P0rpGQT0rV1dujwwgJKoI,9270 +aiohttp/client_proto.py,sha256=c4TAK8CVdycukenCJj7LtlQ3SEj04ilJ3DfmN3kPgeE,8170 +aiohttp/client_reqrep.py,sha256=ULqrco544ZQgYruj1mFD6Fd3af2fOZWJqn07R8rB5J8,36973 +aiohttp/client_ws.py,sha256=Sc0u3S-vZMadtEO6JpbLhVVw7KgtlsgZWHwaSYjkN0I,10516 +aiohttp/connector.py,sha256=0EPxWYzIF3UudRFL77QBVico5EfSMg9mPoy75m1aBhw,51177 +aiohttp/cookiejar.py,sha256=y4bBJoIkJSR9YNGsI2ZqRLdBrsb5Wrph9lljrvncBso,13655 +aiohttp/formdata.py,sha256=q2gpeiM9NFsl_eSFVxHZ7Qez6RbM8_BujERMkooQkx0,6106 +aiohttp/hdrs.py,sha256=owNRw0dgodeDWyobBVLkY88dLbkNoM20czE9xm40oDE,4724 +aiohttp/helpers.py,sha256=vcFashughJ7i_FQuOwdbDK1nwTPYB39xtXYFomhcoLk,26398 +aiohttp/http.py,sha256=_B20NZc113uNtg0jabY-x4_3RrIpTsqmbRIwMcm-Bco,1800 +aiohttp/http_exceptions.py,sha256=eACQt7azwNbUi8aqXB5J2tIcc_CB4_4y4mLM9SZd4tM,2586 +aiohttp/http_parser.py,sha256=_8ESr_Qo22D7tsLtmzJHK2vysqgbI06WWiGmPm5fjWc,33092 +aiohttp/http_websocket.py,sha256=X6kzIgu0-wRLU9yQxP4iH-Hv_36uVjTCOmF2HgpZLlk,25299 +aiohttp/http_writer.py,sha256=PGQjDjtLCluVuxao1Vef8SN9lJFO8joASSHbHipTGgQ,5933 +aiohttp/locks.py,sha256=wRYFo1U82LwBBdqwU24JEPaoTAlKaaJd2FtfDKhkTb4,1136 +aiohttp/log.py,sha256=BbNKx9e3VMIm0xYjZI0IcBBoS7wjdeIeSaiJE7-qK2g,325 +aiohttp/multipart.py,sha256=gmqFziP4ou8ZuoAOibOjoW7OJOsURzI5gkxoLPARQy8,32313 +aiohttp/payload.py,sha256=rachrZ66vC90f7swPXINaatGpPcCSVkiCCVKhrCFfuw,13634 +aiohttp/payload_streamer.py,sha256=3WmZ77SQ4dc8aKU6i2ZAb8L7FPdesCSoqRonSVOd_kE,2112 +aiohttp/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 
+aiohttp/pytest_plugin.py,sha256=4-5LfdrnZIBP2wLp8CjE54eshOolFbBpOTufvp4tUcI,11772 +aiohttp/resolver.py,sha256=CASOnXp5oZh_1sCFWzFlD-5x3V49HAXbAJw-5RYjgms,5092 +aiohttp/streams.py,sha256=_OTvFQVA8-8GJ120Y95lH-hmLq1QaFNBFdaA49TECIc,20758 +aiohttp/tcp_helpers.py,sha256=BSadqVWaBpMFDRWnhaaR941N9MiDZ7bdTrxgCb0CW-M,961 +aiohttp/test_utils.py,sha256=MNQb4Zq6rKLLC3PABy5hIjONlsoxd-lc3OirNGHIJS4,21434 +aiohttp/tracing.py,sha256=gn_O9btTDB66nQ1wJWT3N2gEsO5kYFSIynnd8cp4YgA,15177 +aiohttp/typedefs.py,sha256=oLnG3fxcBEdu4kIzUi0Npcg5kjq01cRkK27VSgjNnmw,1766 +aiohttp/web.py,sha256=bAhkE0oeP6eI06ohkBoPu4ZLchctpEz23cVym_JECSg,18081 +aiohttp/web_app.py,sha256=QkVmy8pR_oaJ3od2fYfSfjzfZ8oGEPg9FPW_d0r2THo,17170 +aiohttp/web_exceptions.py,sha256=T_ghTJB_hnPkoWa3qyeY_GtVbehYQwPCpcCRb-072N0,10098 +aiohttp/web_fileresponse.py,sha256=F_xRvYFL2ox4oVNW3WSjwQ6LmVpkkYM8MPxsqiNsfUg,10784 +aiohttp/web_log.py,sha256=OH-C5shv59-nXchWX8owfLfToMxVdtj0PuK3uGIGEJQ,7557 +aiohttp/web_middlewares.py,sha256=nnllFgxX9GjvkG3YW43jPDH7C03iCfyMBxhYw-OkTI0,4137 +aiohttp/web_protocol.py,sha256=EFr0sy29rW7ffRz-tlRlBnHogmlyt6YvaJP6X1Sg3i8,22399 +aiohttp/web_request.py,sha256=t0RvF_yOJG6uq9-nZjmwXjfqc9Mvgpc3sXUxC67uGvw,28187 +aiohttp/web_response.py,sha256=7WTmyeXY2DrWAhr9HWuxY1SecgxiO_QwRql86PSUS-U,27471 +aiohttp/web_routedef.py,sha256=EFk3v1dcFnLimTT5z0JSBO3PShF0w9sIzfK9iJd-LNs,6152 +aiohttp/web_runner.py,sha256=EzxH4v1lntydU3W-c6iLgDu5LI7kAyD7sAmkz6W5-9g,11157 +aiohttp/web_server.py,sha256=EA1YUxv_4szhpaED1O_VVCUFHNhPUJhl2Cq7W1BK72s,2050 +aiohttp/web_urldispatcher.py,sha256=IAvlOBqCPLjasMnYtCwSqbhj-j1JTQRRHFnNvMFd4d4,39483 +aiohttp/web_ws.py,sha256=8Qmp_zH-F7t9V4NLRFwfoTBr4oWuo3cZrnYT-i-zBI0,17144 +aiohttp/worker.py,sha256=Cbx1KyVligvWa6kje0hSXhdjgJ1AORLN1qu8qJ0LzSQ,8763 diff --git a/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/WHEEL b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/WHEEL new file mode 100644 index 0000000..a9630e4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/top_level.txt new file mode 100644 index 0000000..ee4ba4f --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp-3.8.4.dist-info/top_level.txt @@ -0,0 +1 @@ +aiohttp diff --git a/venv/lib/python3.10/site-packages/aiohttp/.hash/_cparser.pxd.hash b/venv/lib/python3.10/site-packages/aiohttp/.hash/_cparser.pxd.hash new file mode 100644 index 0000000..a4c37d2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/.hash/_cparser.pxd.hash @@ -0,0 +1 @@ +e6d134d56d5f516ab2b5c3b295d0d440a3bef911f4384d506204018895a1f833 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd diff --git a/venv/lib/python3.10/site-packages/aiohttp/.hash/_find_header.pxd.hash b/venv/lib/python3.10/site-packages/aiohttp/.hash/_find_header.pxd.hash new file mode 100644 index 0000000..f006c2d --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/.hash/_find_header.pxd.hash @@ -0,0 +1 @@ +d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd diff --git a/venv/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyi.hash b/venv/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyi.hash new file mode 100644 index 0000000..6a30d63 --- 
/dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyi.hash @@ -0,0 +1 @@ +6682a22524b9d4fc442e123672622be7bdfb6238d9709b7b15b2113b7ca6d52b /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyi diff --git a/venv/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyx.hash b/venv/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyx.hash new file mode 100644 index 0000000..8f38727 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyx.hash @@ -0,0 +1 @@ +5de2db35fb795ffe227e2f1007c8ba4f2ad1b9aca28cc48edc80c779203cf6e3 /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyx diff --git a/venv/lib/python3.10/site-packages/aiohttp/.hash/_http_parser.pyx.hash b/venv/lib/python3.10/site-packages/aiohttp/.hash/_http_parser.pyx.hash new file mode 100644 index 0000000..7a414c4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/.hash/_http_parser.pyx.hash @@ -0,0 +1 @@ +d6edfcfc44b0e55812a9a8f1d669c674ee87aa4d28bfd3f1785ade1eae24b683 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx diff --git a/venv/lib/python3.10/site-packages/aiohttp/.hash/_http_writer.pyx.hash b/venv/lib/python3.10/site-packages/aiohttp/.hash/_http_writer.pyx.hash new file mode 100644 index 0000000..8e1aaab --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/.hash/_http_writer.pyx.hash @@ -0,0 +1 @@ +6881c0a7c838655e646c645d99971efaf5e310bc3633a7c62b226e39d81842ac /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx diff --git a/venv/lib/python3.10/site-packages/aiohttp/.hash/_websocket.pyx.hash b/venv/lib/python3.10/site-packages/aiohttp/.hash/_websocket.pyx.hash new file mode 100644 index 0000000..ddbb4c7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/.hash/_websocket.pyx.hash @@ -0,0 +1 @@ +d57b8e48d0c26f20ebcc5e6e300da2b2a6aeb12b3c9768d64cb0e53432ccf48a /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket.pyx diff --git a/venv/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash b/venv/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash new file mode 100644 index 0000000..1c8653f --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash @@ -0,0 +1 @@ +a30351c34760a1d7835b2a1b0552e463cf1d2db90da0cdb473313dc66e34a031 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py diff --git a/venv/lib/python3.10/site-packages/aiohttp/__init__.py b/venv/lib/python3.10/site-packages/aiohttp/__init__.py new file mode 100644 index 0000000..34022f0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/__init__.py @@ -0,0 +1,216 @@ +__version__ = "3.8.4" + +from typing import Tuple + +from . 
import hdrs as hdrs +from .client import ( + BaseConnector as BaseConnector, + ClientConnectionError as ClientConnectionError, + ClientConnectorCertificateError as ClientConnectorCertificateError, + ClientConnectorError as ClientConnectorError, + ClientConnectorSSLError as ClientConnectorSSLError, + ClientError as ClientError, + ClientHttpProxyError as ClientHttpProxyError, + ClientOSError as ClientOSError, + ClientPayloadError as ClientPayloadError, + ClientProxyConnectionError as ClientProxyConnectionError, + ClientRequest as ClientRequest, + ClientResponse as ClientResponse, + ClientResponseError as ClientResponseError, + ClientSession as ClientSession, + ClientSSLError as ClientSSLError, + ClientTimeout as ClientTimeout, + ClientWebSocketResponse as ClientWebSocketResponse, + ContentTypeError as ContentTypeError, + Fingerprint as Fingerprint, + InvalidURL as InvalidURL, + NamedPipeConnector as NamedPipeConnector, + RequestInfo as RequestInfo, + ServerConnectionError as ServerConnectionError, + ServerDisconnectedError as ServerDisconnectedError, + ServerFingerprintMismatch as ServerFingerprintMismatch, + ServerTimeoutError as ServerTimeoutError, + TCPConnector as TCPConnector, + TooManyRedirects as TooManyRedirects, + UnixConnector as UnixConnector, + WSServerHandshakeError as WSServerHandshakeError, + request as request, +) +from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar +from .formdata import FormData as FormData +from .helpers import BasicAuth, ChainMapProxy, ETag +from .http import ( + HttpVersion as HttpVersion, + HttpVersion10 as HttpVersion10, + HttpVersion11 as HttpVersion11, + WebSocketError as WebSocketError, + WSCloseCode as WSCloseCode, + WSMessage as WSMessage, + WSMsgType as WSMsgType, +) +from .multipart import ( + BadContentDispositionHeader as BadContentDispositionHeader, + BadContentDispositionParam as BadContentDispositionParam, + BodyPartReader as BodyPartReader, + MultipartReader as MultipartReader, + MultipartWriter as MultipartWriter, + content_disposition_filename as content_disposition_filename, + parse_content_disposition as parse_content_disposition, +) +from .payload import ( + PAYLOAD_REGISTRY as PAYLOAD_REGISTRY, + AsyncIterablePayload as AsyncIterablePayload, + BufferedReaderPayload as BufferedReaderPayload, + BytesIOPayload as BytesIOPayload, + BytesPayload as BytesPayload, + IOBasePayload as IOBasePayload, + JsonPayload as JsonPayload, + Payload as Payload, + StringIOPayload as StringIOPayload, + StringPayload as StringPayload, + TextIOPayload as TextIOPayload, + get_payload as get_payload, + payload_type as payload_type, +) +from .payload_streamer import streamer as streamer +from .resolver import ( + AsyncResolver as AsyncResolver, + DefaultResolver as DefaultResolver, + ThreadedResolver as ThreadedResolver, +) +from .streams import ( + EMPTY_PAYLOAD as EMPTY_PAYLOAD, + DataQueue as DataQueue, + EofStream as EofStream, + FlowControlDataQueue as FlowControlDataQueue, + StreamReader as StreamReader, +) +from .tracing import ( + TraceConfig as TraceConfig, + TraceConnectionCreateEndParams as TraceConnectionCreateEndParams, + TraceConnectionCreateStartParams as TraceConnectionCreateStartParams, + TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams, + TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams, + TraceConnectionReuseconnParams as TraceConnectionReuseconnParams, + TraceDnsCacheHitParams as TraceDnsCacheHitParams, + TraceDnsCacheMissParams as TraceDnsCacheMissParams, + 
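# the "X as X" spelling marks each name as an explicit re-export
+    # for type checkers (PEP 484 re-export convention)
+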
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams, + TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams, + TraceRequestChunkSentParams as TraceRequestChunkSentParams, + TraceRequestEndParams as TraceRequestEndParams, + TraceRequestExceptionParams as TraceRequestExceptionParams, + TraceRequestRedirectParams as TraceRequestRedirectParams, + TraceRequestStartParams as TraceRequestStartParams, + TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, +) + +__all__: Tuple[str, ...] = ( + "hdrs", + # client + "BaseConnector", + "ClientConnectionError", + "ClientConnectorCertificateError", + "ClientConnectorError", + "ClientConnectorSSLError", + "ClientError", + "ClientHttpProxyError", + "ClientOSError", + "ClientPayloadError", + "ClientProxyConnectionError", + "ClientResponse", + "ClientRequest", + "ClientResponseError", + "ClientSSLError", + "ClientSession", + "ClientTimeout", + "ClientWebSocketResponse", + "ContentTypeError", + "Fingerprint", + "InvalidURL", + "RequestInfo", + "ServerConnectionError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ServerTimeoutError", + "TCPConnector", + "TooManyRedirects", + "UnixConnector", + "NamedPipeConnector", + "WSServerHandshakeError", + "request", + # cookiejar + "CookieJar", + "DummyCookieJar", + # formdata + "FormData", + # helpers + "BasicAuth", + "ChainMapProxy", + "ETag", + # http + "HttpVersion", + "HttpVersion10", + "HttpVersion11", + "WSMsgType", + "WSCloseCode", + "WSMessage", + "WebSocketError", + # multipart + "BadContentDispositionHeader", + "BadContentDispositionParam", + "BodyPartReader", + "MultipartReader", + "MultipartWriter", + "content_disposition_filename", + "parse_content_disposition", + # payload + "AsyncIterablePayload", + "BufferedReaderPayload", + "BytesIOPayload", + "BytesPayload", + "IOBasePayload", + "JsonPayload", + "PAYLOAD_REGISTRY", + "Payload", + "StringIOPayload", + "StringPayload", + "TextIOPayload", + "get_payload", + "payload_type", + # payload_streamer + "streamer", + # resolver + "AsyncResolver", + "DefaultResolver", + "ThreadedResolver", + # streams + "DataQueue", + "EMPTY_PAYLOAD", + "EofStream", + "FlowControlDataQueue", + "StreamReader", + # tracing + "TraceConfig", + "TraceConnectionCreateEndParams", + "TraceConnectionCreateStartParams", + "TraceConnectionQueuedEndParams", + "TraceConnectionQueuedStartParams", + "TraceConnectionReuseconnParams", + "TraceDnsCacheHitParams", + "TraceDnsCacheMissParams", + "TraceDnsResolveHostEndParams", + "TraceDnsResolveHostStartParams", + "TraceRequestChunkSentParams", + "TraceRequestEndParams", + "TraceRequestExceptionParams", + "TraceRequestRedirectParams", + "TraceRequestStartParams", + "TraceResponseChunkReceivedParams", +) + +try: + from .worker import GunicornUVLoopWebWorker, GunicornWebWorker + + __all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker") +except ImportError: # pragma: no cover + pass diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..c605831 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/abc.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/abc.cpython-310.pyc new file mode 100644 index 0000000..6d2304c Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/abc.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/base_protocol.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/base_protocol.cpython-310.pyc new file mode 100644 index 0000000..31a74e9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/base_protocol.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc new file mode 100644 index 0000000..4eff2bf Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_exceptions.cpython-310.pyc new file mode 100644 index 0000000..ab77544 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_proto.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_proto.cpython-310.pyc new file mode 100644 index 0000000..33b9409 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_proto.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_reqrep.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_reqrep.cpython-310.pyc new file mode 100644 index 0000000..e713648 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_reqrep.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_ws.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_ws.cpython-310.pyc new file mode 100644 index 0000000..e57bc16 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_ws.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc new file mode 100644 index 0000000..f1bbe4e Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc new file mode 100644 index 0000000..62fe3e0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/formdata.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/formdata.cpython-310.pyc new file mode 100644 index 0000000..09fb0df Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/formdata.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc new file mode 100644 index 0000000..66f9bf8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc new file mode 100644 index 0000000..85a4296 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http.cpython-310.pyc new file mode 100644 index 0000000..a894b3c Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_exceptions.cpython-310.pyc new file mode 100644 index 0000000..0df3930 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_parser.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_parser.cpython-310.pyc new file mode 100644 index 0000000..6b691dc Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_parser.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_websocket.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_websocket.cpython-310.pyc new file mode 100644 index 0000000..6e85da6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_websocket.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_writer.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_writer.cpython-310.pyc new file mode 100644 index 0000000..aa0f7ab Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_writer.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/locks.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/locks.cpython-310.pyc new file mode 100644 index 0000000..c83073c Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/locks.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/log.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/log.cpython-310.pyc new file mode 100644 index 0000000..ed9d50c Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/log.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/multipart.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/multipart.cpython-310.pyc new file mode 100644 index 0000000..cad5a92 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/multipart.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/payload.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/payload.cpython-310.pyc new file mode 100644 index 0000000..d018d9a Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/payload.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/payload_streamer.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/payload_streamer.cpython-310.pyc new file mode 100644 index 0000000..5a99472 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/payload_streamer.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc new file mode 100644 index 0000000..922ec06 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc new file mode 100644 index 0000000..4ba3938 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc new file mode 100644 index 0000000..7f060a5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-310.pyc new file mode 100644 index 0000000..47434c8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc new file mode 100644 index 0000000..fdb8c32 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/tracing.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/tracing.cpython-310.pyc new file mode 100644 index 0000000..55b9ea4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/tracing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/typedefs.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/typedefs.cpython-310.pyc new file mode 100644 index 0000000..e877884 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/typedefs.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc new file mode 100644 index 0000000..a12910b Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_app.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_app.cpython-310.pyc new file mode 100644 index 0000000..15ca682 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_app.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc new file mode 100644 index 0000000..56a908c Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-310.pyc new file mode 100644 index 0000000..8086bae Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_log.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_log.cpython-310.pyc new file mode 100644 index 0000000..6d59ebc Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_log.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc new file mode 100644 index 0000000..8fe0e76 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_protocol.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_protocol.cpython-310.pyc new file mode 100644 index 0000000..be64a02 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_protocol.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc new file mode 100644 index 0000000..ddd0ab1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_response.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_response.cpython-310.pyc new file mode 100644 index 0000000..e3516c7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_response.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc new file mode 100644 index 0000000..bfd737b Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_runner.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_runner.cpython-310.pyc new file mode 100644 index 0000000..d41fa3f Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_runner.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc new file mode 100644 index 0000000..dac5fd6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-310.pyc new file mode 100644 index 0000000..5aedd23 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_ws.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_ws.cpython-310.pyc new file mode 100644 index 
0000000..0815cf8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_ws.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/__pycache__/worker.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/worker.cpython-310.pyc new file mode 100644 index 0000000..e32267f Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/__pycache__/worker.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/_cparser.pxd b/venv/lib/python3.10/site-packages/aiohttp/_cparser.pxd new file mode 100644 index 0000000..165dd61 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/_cparser.pxd @@ -0,0 +1,190 @@ +from libc.stdint cimport ( + int8_t, + int16_t, + int32_t, + int64_t, + uint8_t, + uint16_t, + uint32_t, + uint64_t, +) + + +cdef extern from "../vendor/llhttp/build/llhttp.h": + + struct llhttp__internal_s: + int32_t _index + void* _span_pos0 + void* _span_cb0 + int32_t error + const char* reason + const char* error_pos + void* data + void* _current + uint64_t content_length + uint8_t type + uint8_t method + uint8_t http_major + uint8_t http_minor + uint8_t header_state + uint8_t lenient_flags + uint8_t upgrade + uint8_t finish + uint16_t flags + uint16_t status_code + void* settings + + ctypedef llhttp__internal_s llhttp__internal_t + ctypedef llhttp__internal_t llhttp_t + + ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1 + ctypedef int (*llhttp_cb)(llhttp_t*) except -1 + + struct llhttp_settings_s: + llhttp_cb on_message_begin + llhttp_data_cb on_url + llhttp_data_cb on_status + llhttp_data_cb on_header_field + llhttp_data_cb on_header_value + llhttp_cb on_headers_complete + llhttp_data_cb on_body + llhttp_cb on_message_complete + llhttp_cb on_chunk_header + llhttp_cb on_chunk_complete + + llhttp_cb on_url_complete + llhttp_cb on_status_complete + llhttp_cb on_header_field_complete + llhttp_cb on_header_value_complete + + ctypedef llhttp_settings_s llhttp_settings_t + + enum llhttp_errno: + HPE_OK, + HPE_INTERNAL, + HPE_STRICT, + HPE_LF_EXPECTED, + HPE_UNEXPECTED_CONTENT_LENGTH, + HPE_CLOSED_CONNECTION, + HPE_INVALID_METHOD, + HPE_INVALID_URL, + HPE_INVALID_CONSTANT, + HPE_INVALID_VERSION, + HPE_INVALID_HEADER_TOKEN, + HPE_INVALID_CONTENT_LENGTH, + HPE_INVALID_CHUNK_SIZE, + HPE_INVALID_STATUS, + HPE_INVALID_EOF_STATE, + HPE_INVALID_TRANSFER_ENCODING, + HPE_CB_MESSAGE_BEGIN, + HPE_CB_HEADERS_COMPLETE, + HPE_CB_MESSAGE_COMPLETE, + HPE_CB_CHUNK_HEADER, + HPE_CB_CHUNK_COMPLETE, + HPE_PAUSED, + HPE_PAUSED_UPGRADE, + HPE_USER + + ctypedef llhttp_errno llhttp_errno_t + + enum llhttp_flags: + F_CONNECTION_KEEP_ALIVE, + F_CONNECTION_CLOSE, + F_CONNECTION_UPGRADE, + F_CHUNKED, + F_UPGRADE, + F_CONTENT_LENGTH, + F_SKIPBODY, + F_TRAILING, + F_TRANSFER_ENCODING + + enum llhttp_lenient_flags: + LENIENT_HEADERS, + LENIENT_CHUNKED_LENGTH + + enum llhttp_type: + HTTP_REQUEST, + HTTP_RESPONSE, + HTTP_BOTH + + enum llhttp_finish_t: + HTTP_FINISH_SAFE, + HTTP_FINISH_SAFE_WITH_CB, + HTTP_FINISH_UNSAFE + + enum llhttp_method: + HTTP_DELETE, + HTTP_GET, + HTTP_HEAD, + HTTP_POST, + HTTP_PUT, + HTTP_CONNECT, + HTTP_OPTIONS, + HTTP_TRACE, + HTTP_COPY, + HTTP_LOCK, + HTTP_MKCOL, + HTTP_MOVE, + HTTP_PROPFIND, + HTTP_PROPPATCH, + HTTP_SEARCH, + HTTP_UNLOCK, + HTTP_BIND, + HTTP_REBIND, + HTTP_UNBIND, + HTTP_ACL, + HTTP_REPORT, + HTTP_MKACTIVITY, + HTTP_CHECKOUT, + HTTP_MERGE, + HTTP_MSEARCH, + HTTP_NOTIFY, + HTTP_SUBSCRIBE, + HTTP_UNSUBSCRIBE, + HTTP_PATCH, + HTTP_PURGE, + 
HTTP_MKCALENDAR, + HTTP_LINK, + HTTP_UNLINK, + HTTP_SOURCE, + HTTP_PRI, + HTTP_DESCRIBE, + HTTP_ANNOUNCE, + HTTP_SETUP, + HTTP_PLAY, + HTTP_PAUSE, + HTTP_TEARDOWN, + HTTP_GET_PARAMETER, + HTTP_SET_PARAMETER, + HTTP_REDIRECT, + HTTP_RECORD, + HTTP_FLUSH + + ctypedef llhttp_method llhttp_method_t; + + void llhttp_settings_init(llhttp_settings_t* settings) + void llhttp_init(llhttp_t* parser, llhttp_type type, + const llhttp_settings_t* settings) + + llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len) + llhttp_errno_t llhttp_finish(llhttp_t* parser) + + int llhttp_message_needs_eof(const llhttp_t* parser) + + int llhttp_should_keep_alive(const llhttp_t* parser) + + void llhttp_pause(llhttp_t* parser) + void llhttp_resume(llhttp_t* parser) + + void llhttp_resume_after_upgrade(llhttp_t* parser) + + llhttp_errno_t llhttp_get_errno(const llhttp_t* parser) + const char* llhttp_get_error_reason(const llhttp_t* parser) + void llhttp_set_error_reason(llhttp_t* parser, const char* reason) + const char* llhttp_get_error_pos(const llhttp_t* parser) + const char* llhttp_errno_name(llhttp_errno_t err) + + const char* llhttp_method_name(llhttp_method_t method) + + void llhttp_set_lenient_headers(llhttp_t* parser, int enabled) + void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled) diff --git a/venv/lib/python3.10/site-packages/aiohttp/_find_header.pxd b/venv/lib/python3.10/site-packages/aiohttp/_find_header.pxd new file mode 100644 index 0000000..37a6c37 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/_find_header.pxd @@ -0,0 +1,2 @@ +cdef extern from "_find_header.h": + int find_header(char *, int) diff --git a/venv/lib/python3.10/site-packages/aiohttp/_headers.pxi b/venv/lib/python3.10/site-packages/aiohttp/_headers.pxi new file mode 100644 index 0000000..3744721 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/_headers.pxi @@ -0,0 +1,83 @@ +# The file is autogenerated from aiohttp/hdrs.py +# Run ./tools/gen.py to update it after the origin changing. + +from . 
import hdrs +cdef tuple headers = ( + hdrs.ACCEPT, + hdrs.ACCEPT_CHARSET, + hdrs.ACCEPT_ENCODING, + hdrs.ACCEPT_LANGUAGE, + hdrs.ACCEPT_RANGES, + hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS, + hdrs.ACCESS_CONTROL_ALLOW_HEADERS, + hdrs.ACCESS_CONTROL_ALLOW_METHODS, + hdrs.ACCESS_CONTROL_ALLOW_ORIGIN, + hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, + hdrs.ACCESS_CONTROL_MAX_AGE, + hdrs.ACCESS_CONTROL_REQUEST_HEADERS, + hdrs.ACCESS_CONTROL_REQUEST_METHOD, + hdrs.AGE, + hdrs.ALLOW, + hdrs.AUTHORIZATION, + hdrs.CACHE_CONTROL, + hdrs.CONNECTION, + hdrs.CONTENT_DISPOSITION, + hdrs.CONTENT_ENCODING, + hdrs.CONTENT_LANGUAGE, + hdrs.CONTENT_LENGTH, + hdrs.CONTENT_LOCATION, + hdrs.CONTENT_MD5, + hdrs.CONTENT_RANGE, + hdrs.CONTENT_TRANSFER_ENCODING, + hdrs.CONTENT_TYPE, + hdrs.COOKIE, + hdrs.DATE, + hdrs.DESTINATION, + hdrs.DIGEST, + hdrs.ETAG, + hdrs.EXPECT, + hdrs.EXPIRES, + hdrs.FORWARDED, + hdrs.FROM, + hdrs.HOST, + hdrs.IF_MATCH, + hdrs.IF_MODIFIED_SINCE, + hdrs.IF_NONE_MATCH, + hdrs.IF_RANGE, + hdrs.IF_UNMODIFIED_SINCE, + hdrs.KEEP_ALIVE, + hdrs.LAST_EVENT_ID, + hdrs.LAST_MODIFIED, + hdrs.LINK, + hdrs.LOCATION, + hdrs.MAX_FORWARDS, + hdrs.ORIGIN, + hdrs.PRAGMA, + hdrs.PROXY_AUTHENTICATE, + hdrs.PROXY_AUTHORIZATION, + hdrs.RANGE, + hdrs.REFERER, + hdrs.RETRY_AFTER, + hdrs.SEC_WEBSOCKET_ACCEPT, + hdrs.SEC_WEBSOCKET_EXTENSIONS, + hdrs.SEC_WEBSOCKET_KEY, + hdrs.SEC_WEBSOCKET_KEY1, + hdrs.SEC_WEBSOCKET_PROTOCOL, + hdrs.SEC_WEBSOCKET_VERSION, + hdrs.SERVER, + hdrs.SET_COOKIE, + hdrs.TE, + hdrs.TRAILER, + hdrs.TRANSFER_ENCODING, + hdrs.URI, + hdrs.UPGRADE, + hdrs.USER_AGENT, + hdrs.VARY, + hdrs.VIA, + hdrs.WWW_AUTHENTICATE, + hdrs.WANT_DIGEST, + hdrs.WARNING, + hdrs.X_FORWARDED_FOR, + hdrs.X_FORWARDED_HOST, + hdrs.X_FORWARDED_PROTO, +) diff --git a/venv/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so new file mode 100755 index 0000000..f76b655 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/_helpers.pyi b/venv/lib/python3.10/site-packages/aiohttp/_helpers.pyi new file mode 100644 index 0000000..1e35893 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/_helpers.pyi @@ -0,0 +1,6 @@ +from typing import Any + +class reify: + def __init__(self, wrapped: Any) -> None: ... + def __get__(self, inst: Any, owner: Any) -> Any: ... + def __set__(self, inst: Any, value: Any) -> None: ... diff --git a/venv/lib/python3.10/site-packages/aiohttp/_helpers.pyx b/venv/lib/python3.10/site-packages/aiohttp/_helpers.pyx new file mode 100644 index 0000000..665f367 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/_helpers.pyx @@ -0,0 +1,35 @@ +cdef class reify: + """Use as a class method decorator. It operates almost exactly like + the Python `@property` decorator, but it puts the result of the + method it decorates into the instance dict after the first call, + effectively replacing the function it decorates with an instance + variable. It is, in Python parlance, a data descriptor. 
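+
+    A minimal usage sketch (hypothetical host class; __get__ below caches
+    into the instance's `_cache` dict, so the host must provide one):
+
+        class Message:
+            def __init__(self, raw):
+                self._raw = raw
+                self._cache = {}
+
+            @reify
+            def size(self):
+                # computed once on first access, then served from _cache
+                return len(self._raw)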
+ + """ + + cdef object wrapped + cdef object name + + def __init__(self, wrapped): + self.wrapped = wrapped + self.name = wrapped.__name__ + + @property + def __doc__(self): + return self.wrapped.__doc__ + + def __get__(self, inst, owner): + try: + try: + return inst._cache[self.name] + except KeyError: + val = self.wrapped(inst) + inst._cache[self.name] = val + return val + except AttributeError: + if inst is None: + return self + raise + + def __set__(self, inst, value): + raise AttributeError("reified property is read-only") diff --git a/venv/lib/python3.10/site-packages/aiohttp/_http_parser.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/aiohttp/_http_parser.cpython-310-x86_64-linux-gnu.so new file mode 100755 index 0000000..2ae972a Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/_http_parser.cpython-310-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/_http_parser.pyx b/venv/lib/python3.10/site-packages/aiohttp/_http_parser.pyx new file mode 100644 index 0000000..bebd989 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/_http_parser.pyx @@ -0,0 +1,832 @@ +#cython: language_level=3 +# +# Based on https://github.com/MagicStack/httptools +# +from __future__ import absolute_import, print_function + +from cpython cimport ( + Py_buffer, + PyBUF_SIMPLE, + PyBuffer_Release, + PyBytes_AsString, + PyBytes_AsStringAndSize, + PyObject_GetBuffer, +) +from cpython.mem cimport PyMem_Free, PyMem_Malloc +from libc.limits cimport ULLONG_MAX +from libc.string cimport memcpy + +from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy +from yarl import URL as _URL + +from aiohttp import hdrs + +from .http_exceptions import ( + BadHttpMessage, + BadStatusLine, + ContentLengthError, + InvalidHeader, + InvalidURLError, + LineTooLong, + PayloadEncodingError, + TransferEncodingError, +) +from .http_parser import DeflateBuffer as _DeflateBuffer +from .http_writer import ( + HttpVersion as _HttpVersion, + HttpVersion10 as _HttpVersion10, + HttpVersion11 as _HttpVersion11, +) +from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader + +cimport cython + +from aiohttp cimport _cparser as cparser + +include "_headers.pxi" + +from aiohttp cimport _find_header + +DEF DEFAULT_FREELIST_SIZE = 250 + +cdef extern from "Python.h": + int PyByteArray_Resize(object, Py_ssize_t) except -1 + Py_ssize_t PyByteArray_Size(object) except -1 + char* PyByteArray_AsString(object) + +__all__ = ('HttpRequestParser', 'HttpResponseParser', + 'RawRequestMessage', 'RawResponseMessage') + +cdef object URL = _URL +cdef object URL_build = URL.build +cdef object CIMultiDict = _CIMultiDict +cdef object CIMultiDictProxy = _CIMultiDictProxy +cdef object HttpVersion = _HttpVersion +cdef object HttpVersion10 = _HttpVersion10 +cdef object HttpVersion11 = _HttpVersion11 +cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 +cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING +cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD +cdef object StreamReader = _StreamReader +cdef object DeflateBuffer = _DeflateBuffer + + +cdef inline object extend(object buf, const char* at, size_t length): + cdef Py_ssize_t s + cdef char* ptr + s = PyByteArray_Size(buf) + PyByteArray_Resize(buf, s + length) + ptr = PyByteArray_AsString(buf) + memcpy(ptr + s, at, length) + + +DEF METHODS_COUNT = 46; + +cdef list _http_method = [] + +for i in range(METHODS_COUNT): + _http_method.append( + cparser.llhttp_method_name( 
i).decode('ascii')) + + +cdef inline str http_method_str(int i): + if i < METHODS_COUNT: + return _http_method[i] + else: + return "<unknown>" + +cdef inline object find_header(bytes raw_header): + cdef Py_ssize_t size + cdef char *buf + cdef int idx + PyBytes_AsStringAndSize(raw_header, &buf, &size) + idx = _find_header.find_header(buf, size) + if idx == -1: + return raw_header.decode('utf-8', 'surrogateescape') + return headers[idx] + + +@cython.freelist(DEFAULT_FREELIST_SIZE) +cdef class RawRequestMessage: + cdef readonly str method + cdef readonly str path + cdef readonly object version # HttpVersion + cdef readonly object headers # CIMultiDict + cdef readonly object raw_headers # tuple + cdef readonly object should_close + cdef readonly object compression + cdef readonly object upgrade + cdef readonly object chunked + cdef readonly object url # yarl.URL + + def __init__(self, method, path, version, headers, raw_headers, + should_close, compression, upgrade, chunked, url): + self.method = method + self.path = path + self.version = version + self.headers = headers + self.raw_headers = raw_headers + self.should_close = should_close + self.compression = compression + self.upgrade = upgrade + self.chunked = chunked + self.url = url + + def __repr__(self): + info = [] + info.append(("method", self.method)) + info.append(("path", self.path)) + info.append(("version", self.version)) + info.append(("headers", self.headers)) + info.append(("raw_headers", self.raw_headers)) + info.append(("should_close", self.should_close)) + info.append(("compression", self.compression)) + info.append(("upgrade", self.upgrade)) + info.append(("chunked", self.chunked)) + info.append(("url", self.url)) + sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + return '<RawRequestMessage(' + sinfo + ')>' + + def _replace(self, **dct): + cdef RawRequestMessage ret + ret = _new_request_message(self.method, + self.path, + self.version, + self.headers, + self.raw_headers, + self.should_close, + self.compression, + self.upgrade, + self.chunked, + self.url) + if "method" in dct: + ret.method = dct["method"] + if "path" in dct: + ret.path = dct["path"] + if "version" in dct: + ret.version = dct["version"] + if "headers" in dct: + ret.headers = dct["headers"] + if "raw_headers" in dct: + ret.raw_headers = dct["raw_headers"] + if "should_close" in dct: + ret.should_close = dct["should_close"] + if "compression" in dct: + ret.compression = dct["compression"] + if "upgrade" in dct: + ret.upgrade = dct["upgrade"] + if "chunked" in dct: + ret.chunked = dct["chunked"] + if "url" in dct: + ret.url = dct["url"] + return ret + +cdef _new_request_message(str method, + str path, + object version, + object headers, + object raw_headers, + bint should_close, + object compression, + bint upgrade, + bint chunked, + object url): + cdef RawRequestMessage ret + ret = RawRequestMessage.__new__(RawRequestMessage) + ret.method = method + ret.path = path + ret.version = version + ret.headers = headers + ret.raw_headers = raw_headers + ret.should_close = should_close + ret.compression = compression + ret.upgrade = upgrade + ret.chunked = chunked + ret.url = url + return ret + + +@cython.freelist(DEFAULT_FREELIST_SIZE) +cdef class RawResponseMessage: + cdef readonly object version # HttpVersion + cdef readonly int code + cdef readonly str reason + cdef readonly object headers # CIMultiDict + cdef readonly object raw_headers # tuple + cdef readonly object should_close + cdef readonly object compression + cdef readonly object upgrade + cdef readonly object chunked + + def 
__init__(self, version, code, reason, headers, raw_headers, + should_close, compression, upgrade, chunked): + self.version = version + self.code = code + self.reason = reason + self.headers = headers + self.raw_headers = raw_headers + self.should_close = should_close + self.compression = compression + self.upgrade = upgrade + self.chunked = chunked + + def __repr__(self): + info = [] + info.append(("version", self.version)) + info.append(("code", self.code)) + info.append(("reason", self.reason)) + info.append(("headers", self.headers)) + info.append(("raw_headers", self.raw_headers)) + info.append(("should_close", self.should_close)) + info.append(("compression", self.compression)) + info.append(("upgrade", self.upgrade)) + info.append(("chunked", self.chunked)) + sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + return '' + + +cdef _new_response_message(object version, + int code, + str reason, + object headers, + object raw_headers, + bint should_close, + object compression, + bint upgrade, + bint chunked): + cdef RawResponseMessage ret + ret = RawResponseMessage.__new__(RawResponseMessage) + ret.version = version + ret.code = code + ret.reason = reason + ret.headers = headers + ret.raw_headers = raw_headers + ret.should_close = should_close + ret.compression = compression + ret.upgrade = upgrade + ret.chunked = chunked + return ret + + +@cython.internal +cdef class HttpParser: + + cdef: + cparser.llhttp_t* _cparser + cparser.llhttp_settings_t* _csettings + + bytearray _raw_name + bytearray _raw_value + bint _has_value + + object _protocol + object _loop + object _timer + + size_t _max_line_size + size_t _max_field_size + size_t _max_headers + bint _response_with_body + bint _read_until_eof + + bint _started + object _url + bytearray _buf + str _path + str _reason + object _headers + list _raw_headers + bint _upgraded + list _messages + object _payload + bint _payload_error + object _payload_exception + object _last_error + bint _auto_decompress + int _limit + + str _content_encoding + + Py_buffer py_buf + + def __cinit__(self): + self._cparser = \ + PyMem_Malloc(sizeof(cparser.llhttp_t)) + if self._cparser is NULL: + raise MemoryError() + + self._csettings = \ + PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) + if self._csettings is NULL: + raise MemoryError() + + def __dealloc__(self): + PyMem_Free(self._cparser) + PyMem_Free(self._csettings) + + cdef _init( + self, cparser.llhttp_type mode, + object protocol, object loop, int limit, + object timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True, + ): + cparser.llhttp_settings_init(self._csettings) + cparser.llhttp_init(self._cparser, mode, self._csettings) + self._cparser.data = self + self._cparser.content_length = 0 + + self._protocol = protocol + self._loop = loop + self._timer = timer + + self._buf = bytearray() + self._payload = None + self._payload_error = 0 + self._payload_exception = payload_exception + self._messages = [] + + self._raw_name = bytearray() + self._raw_value = bytearray() + self._has_value = False + + self._max_line_size = max_line_size + self._max_headers = max_headers + self._max_field_size = max_field_size + self._response_with_body = response_with_body + self._read_until_eof = read_until_eof + self._upgraded = False + self._auto_decompress = auto_decompress + self._content_encoding = None + + self._csettings.on_url = cb_on_url + 
self._csettings.on_status = cb_on_status + self._csettings.on_header_field = cb_on_header_field + self._csettings.on_header_value = cb_on_header_value + self._csettings.on_headers_complete = cb_on_headers_complete + self._csettings.on_body = cb_on_body + self._csettings.on_message_begin = cb_on_message_begin + self._csettings.on_message_complete = cb_on_message_complete + self._csettings.on_chunk_header = cb_on_chunk_header + self._csettings.on_chunk_complete = cb_on_chunk_complete + + self._last_error = None + self._limit = limit + + cdef _process_header(self): + if self._raw_name: + raw_name = bytes(self._raw_name) + raw_value = bytes(self._raw_value) + + name = find_header(raw_name) + value = raw_value.decode('utf-8', 'surrogateescape') + + self._headers.add(name, value) + + if name is CONTENT_ENCODING: + self._content_encoding = value + + PyByteArray_Resize(self._raw_name, 0) + PyByteArray_Resize(self._raw_value, 0) + self._has_value = False + self._raw_headers.append((raw_name, raw_value)) + + cdef _on_header_field(self, char* at, size_t length): + cdef Py_ssize_t size + cdef char *buf + if self._has_value: + self._process_header() + + size = PyByteArray_Size(self._raw_name) + PyByteArray_Resize(self._raw_name, size + length) + buf = PyByteArray_AsString(self._raw_name) + memcpy(buf + size, at, length) + + cdef _on_header_value(self, char* at, size_t length): + cdef Py_ssize_t size + cdef char *buf + + size = PyByteArray_Size(self._raw_value) + PyByteArray_Resize(self._raw_value, size + length) + buf = PyByteArray_AsString(self._raw_value) + memcpy(buf + size, at, length) + self._has_value = True + + cdef _on_headers_complete(self): + self._process_header() + + method = http_method_str(self._cparser.method) + should_close = not cparser.llhttp_should_keep_alive(self._cparser) + upgrade = self._cparser.upgrade + chunked = self._cparser.flags & cparser.F_CHUNKED + + raw_headers = tuple(self._raw_headers) + headers = CIMultiDictProxy(self._headers) + + if upgrade or self._cparser.method == cparser.HTTP_CONNECT: + self._upgraded = True + + # do not support old websocket spec + if SEC_WEBSOCKET_KEY1 in headers: + raise InvalidHeader(SEC_WEBSOCKET_KEY1) + + encoding = None + enc = self._content_encoding + if enc is not None: + self._content_encoding = None + enc = enc.lower() + if enc in ('gzip', 'deflate', 'br'): + encoding = enc + + if self._cparser.type == cparser.HTTP_REQUEST: + msg = _new_request_message( + method, self._path, + self.http_version(), headers, raw_headers, + should_close, encoding, upgrade, chunked, self._url) + else: + msg = _new_response_message( + self.http_version(), self._cparser.status_code, self._reason, + headers, raw_headers, should_close, encoding, + upgrade, chunked) + + if ( + ULLONG_MAX > self._cparser.content_length > 0 or chunked or + self._cparser.method == cparser.HTTP_CONNECT or + (self._cparser.status_code >= 199 and + self._cparser.content_length == 0 and + self._read_until_eof) + ): + payload = StreamReader( + self._protocol, timer=self._timer, loop=self._loop, + limit=self._limit) + else: + payload = EMPTY_PAYLOAD + + self._payload = payload + if encoding is not None and self._auto_decompress: + self._payload = DeflateBuffer(payload, encoding) + + if not self._response_with_body: + payload = EMPTY_PAYLOAD + + self._messages.append((msg, payload)) + + cdef _on_message_complete(self): + self._payload.feed_eof() + self._payload = None + + cdef _on_chunk_header(self): + self._payload.begin_http_chunk_receiving() + + cdef _on_chunk_complete(self): + 
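llhttp may deliver a single header name or value to on_header_field / on_header_value in several chunks, so the parser above accumulates bytes in _raw_name / _raw_value and only flushes a completed (name, value) pair once a new field starts or the header block ends. A minimal pure-Python sketch of that accumulation state machine; the class and method names are illustrative, not part of the vendored file:

class HeaderAccumulator:
    """Reassembles headers from chunked llhttp-style callbacks."""

    def __init__(self):
        self._raw_name = bytearray()
        self._raw_value = bytearray()
        self._has_value = False  # a value chunk arrived for the current name
        self.headers = []

    def _flush(self):
        if self._raw_name:
            self.headers.append((bytes(self._raw_name), bytes(self._raw_value)))
            self._raw_name.clear()
            self._raw_value.clear()
            self._has_value = False

    def on_header_field(self, chunk: bytes):
        if self._has_value:  # previous header is complete, emit it
            self._flush()
        self._raw_name += chunk

    def on_header_value(self, chunk: bytes):
        self._raw_value += chunk
        self._has_value = True

    def on_headers_complete(self):
        self._flush()

acc = HeaderAccumulator()
for kind, chunk in [("f", b"Content-"), ("f", b"Type"), ("v", b"text/"),
                    ("v", b"plain"), ("f", b"Host"), ("v", b"example.org")]:
    (acc.on_header_field if kind == "f" else acc.on_header_value)(chunk)
acc.on_headers_complete()
assert acc.headers == [(b"Content-Type", b"text/plain"), (b"Host", b"example.org")]

Flushing on the value-to-field transition is what makes the chunked callbacks invisible to consumers of the parsed message.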
self._payload.end_http_chunk_receiving() + + cdef object _on_status_complete(self): + pass + + cdef inline http_version(self): + cdef cparser.llhttp_t* parser = self._cparser + + if parser.http_major == 1: + if parser.http_minor == 0: + return HttpVersion10 + elif parser.http_minor == 1: + return HttpVersion11 + + return HttpVersion(parser.http_major, parser.http_minor) + + ### Public API ### + + def feed_eof(self): + cdef bytes desc + + if self._payload is not None: + if self._cparser.flags & cparser.F_CHUNKED: + raise TransferEncodingError( + "Not enough data for satisfy transfer length header.") + elif self._cparser.flags & cparser.F_CONTENT_LENGTH: + raise ContentLengthError( + "Not enough data for satisfy content length header.") + elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: + desc = cparser.llhttp_get_error_reason(self._cparser) + raise PayloadEncodingError(desc.decode('latin-1')) + else: + self._payload.feed_eof() + elif self._started: + self._on_headers_complete() + if self._messages: + return self._messages[-1][0] + + def feed_data(self, data): + cdef: + size_t data_len + size_t nb + cdef cparser.llhttp_errno_t errno + + PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) + data_len = self.py_buf.len + + errno = cparser.llhttp_execute( + self._cparser, + self.py_buf.buf, + data_len) + + if errno is cparser.HPE_PAUSED_UPGRADE: + cparser.llhttp_resume_after_upgrade(self._cparser) + + nb = cparser.llhttp_get_error_pos(self._cparser) - self.py_buf.buf + + PyBuffer_Release(&self.py_buf) + + if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): + if self._payload_error == 0: + if self._last_error is not None: + ex = self._last_error + self._last_error = None + else: + ex = parser_error_from_errno(self._cparser) + self._payload = None + raise ex + + if self._messages: + messages = self._messages + self._messages = [] + else: + messages = () + + if self._upgraded: + return messages, True, data[nb:] + else: + return messages, False, b'' + + def set_upgraded(self, val): + self._upgraded = val + + +cdef class HttpRequestParser(HttpParser): + + def __init__( + self, protocol, loop, int limit, timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True, + ): + self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer, + max_line_size, max_headers, max_field_size, + payload_exception, response_with_body, read_until_eof, + auto_decompress) + + cdef object _on_status_complete(self): + cdef int idx1, idx2 + if not self._buf: + return + self._path = self._buf.decode('utf-8', 'surrogateescape') + try: + idx3 = len(self._path) + if self._cparser.method == cparser.HTTP_CONNECT: + # authority-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 + self._url = URL.build(authority=self._path, encoded=True) + elif idx3 > 1 and self._path[0] == '/': + # origin-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 + idx1 = self._path.find("?") + if idx1 == -1: + query = "" + idx2 = self._path.find("#") + if idx2 == -1: + path = self._path + fragment = "" + else: + path = self._path[0: idx2] + fragment = self._path[idx2+1:] + + else: + path = self._path[0:idx1] + idx1 += 1 + idx2 = self._path.find("#", idx1+1) + if idx2 == -1: + query = self._path[idx1:] + fragment = "" + else: + query = self._path[idx1: idx2] + fragment = self._path[idx2+1:] + + self._url = URL.build( + path=path, + 
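_on_status_complete above classifies the request target: CONNECT requests carry authority-form, a leading "/" means origin-form, and anything else is treated as an absolute URL (the proxy case). For origin-form, path, query, and fragment are split with plain find() calls before URL.build(..., encoded=True) is invoked. A simplified sketch of that split; the helper name is hypothetical:

def split_origin_form(target: str):
    """Split '/path?query#fragment' into its three components."""
    path, query, fragment = target, "", ""
    q = target.find("?")
    if q == -1:
        h = target.find("#")
        if h != -1:
            path, fragment = target[:h], target[h + 1:]
    else:
        path = target[:q]
        h = target.find("#", q + 1)
        if h == -1:
            query = target[q + 1:]
        else:
            query, fragment = target[q + 1:h], target[h + 1:]
    return path, query, fragment

assert split_origin_form("/chat?room=1#latest") == ("/chat", "room=1", "latest")
assert split_origin_form("/ping") == ("/ping", "", "")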
query_string=query, + fragment=fragment, + encoded=True, + ) + else: + # absolute-form for proxy maybe, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 + self._url = URL(self._path, encoded=True) + finally: + PyByteArray_Resize(self._buf, 0) + + +cdef class HttpResponseParser(HttpParser): + + def __init__( + self, protocol, loop, int limit, timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True + ): + self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer, + max_line_size, max_headers, max_field_size, + payload_exception, response_with_body, read_until_eof, + auto_decompress) + + cdef object _on_status_complete(self): + if self._buf: + self._reason = self._buf.decode('utf-8', 'surrogateescape') + PyByteArray_Resize(self._buf, 0) + else: + self._reason = self._reason or '' + +cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + + pyparser._started = True + pyparser._headers = CIMultiDict() + pyparser._raw_headers = [] + PyByteArray_Resize(pyparser._buf, 0) + pyparser._path = None + pyparser._reason = None + return 0 + + +cdef int cb_on_url(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + try: + if length > pyparser._max_line_size: + raise LineTooLong( + 'Status line is too long', pyparser._max_line_size, length) + extend(pyparser._buf, at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_status(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef str reason + try: + if length > pyparser._max_line_size: + raise LineTooLong( + 'Status line is too long', pyparser._max_line_size, length) + extend(pyparser._buf, at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_header_field(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef Py_ssize_t size + try: + pyparser._on_status_complete() + size = len(pyparser._raw_name) + length + if size > pyparser._max_field_size: + raise LineTooLong( + 'Header name is too long', pyparser._max_field_size, size) + pyparser._on_header_field(at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_header_value(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef Py_ssize_t size + try: + size = len(pyparser._raw_value) + length + if size > pyparser._max_field_size: + raise LineTooLong( + 'Header value is too long', pyparser._max_field_size, size) + pyparser._on_header_value(at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_status_complete() + pyparser._on_headers_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + if ( + pyparser._cparser.upgrade or + pyparser._cparser.method == cparser.HTTP_CONNECT + ): + return 2 + else: + return 0 + + +cdef int cb_on_body(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = 
parser.data + cdef bytes body = at[:length] + try: + pyparser._payload.feed_data(body, length) + except BaseException as exc: + if pyparser._payload_exception is not None: + pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) + else: + pyparser._payload.set_exception(exc) + pyparser._payload_error = 1 + return -1 + else: + return 0 + + +cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._started = False + pyparser._on_message_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_chunk_header() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_chunk_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef parser_error_from_errno(cparser.llhttp_t* parser): + cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser) + cdef bytes desc = cparser.llhttp_get_error_reason(parser) + + if errno in (cparser.HPE_CB_MESSAGE_BEGIN, + cparser.HPE_CB_HEADERS_COMPLETE, + cparser.HPE_CB_MESSAGE_COMPLETE, + cparser.HPE_CB_CHUNK_HEADER, + cparser.HPE_CB_CHUNK_COMPLETE, + cparser.HPE_INVALID_CONSTANT, + cparser.HPE_INVALID_HEADER_TOKEN, + cparser.HPE_INVALID_CONTENT_LENGTH, + cparser.HPE_INVALID_CHUNK_SIZE, + cparser.HPE_INVALID_EOF_STATE, + cparser.HPE_INVALID_TRANSFER_ENCODING): + cls = BadHttpMessage + + elif errno == cparser.HPE_INVALID_STATUS: + cls = BadStatusLine + + elif errno == cparser.HPE_INVALID_METHOD: + cls = BadStatusLine + + elif errno == cparser.HPE_INVALID_VERSION: + cls = BadStatusLine + + elif errno == cparser.HPE_INVALID_URL: + cls = InvalidURLError + + else: + cls = BadHttpMessage + + return cls(desc.decode('latin-1')) diff --git a/venv/lib/python3.10/site-packages/aiohttp/_http_writer.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/aiohttp/_http_writer.cpython-310-x86_64-linux-gnu.so new file mode 100755 index 0000000..7937e09 Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/_http_writer.cpython-310-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/_http_writer.pyx b/venv/lib/python3.10/site-packages/aiohttp/_http_writer.pyx new file mode 100644 index 0000000..eff8521 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/_http_writer.pyx @@ -0,0 +1,163 @@ +from cpython.bytes cimport PyBytes_FromStringAndSize +from cpython.exc cimport PyErr_NoMemory +from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc +from cpython.object cimport PyObject_Str +from libc.stdint cimport uint8_t, uint64_t +from libc.string cimport memcpy + +from multidict import istr + +DEF BUF_SIZE = 16 * 1024 # 16KiB +cdef char BUFFER[BUF_SIZE] + +cdef object _istr = istr + + +# ----------------- writer --------------------------- + +cdef struct Writer: + char *buf + Py_ssize_t size + Py_ssize_t pos + + +cdef inline void _init_writer(Writer* writer): + writer.buf = &BUFFER[0] + writer.size = BUF_SIZE + writer.pos = 0 + + +cdef inline void _release_writer(Writer* writer): + if writer.buf != BUFFER: + PyMem_Free(writer.buf) + + +cdef inline int _write_byte(Writer* writer, uint8_t ch): + cdef char * buf + cdef Py_ssize_t 
size + + if writer.pos == writer.size: + # reallocate + size = writer.size + BUF_SIZE + if writer.buf == BUFFER: + buf = <char*>PyMem_Malloc(size) + if buf == NULL: + PyErr_NoMemory() + return -1 + memcpy(buf, writer.buf, writer.size) + else: + buf = <char*>PyMem_Realloc(writer.buf, size) + if buf == NULL: + PyErr_NoMemory() + return -1 + writer.buf = buf + writer.size = size + writer.buf[writer.pos] = ch + writer.pos += 1 + return 0 + + +cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): + cdef uint64_t utf = <uint64_t> symbol + + if utf < 0x80: + return _write_byte(writer, <uint8_t>utf) + elif utf < 0x800: + if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0: + return -1 + return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f))) + elif 0xD800 <= utf <= 0xDFFF: + # surrogate pair, ignored + return 0 + elif utf < 0x10000: + if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0: + return -1 + if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0: + return -1 + return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f))) + elif utf > 0x10FFFF: + # symbol is too large + return 0 + else: + if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0: + return -1 + if _write_byte(writer, + <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0: + return -1 + if _write_byte(writer, + <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0: + return -1 + return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f))) + + +cdef inline int _write_str(Writer* writer, str s): + cdef Py_UCS4 ch + for ch in s: + if _write_utf8(writer, ch) < 0: + return -1 + + +# --------------- _serialize_headers ---------------------- + +cdef str to_str(object s): + typ = type(s) + if typ is str: + return s + elif typ is _istr: + return PyObject_Str(s) + elif not isinstance(s, str): + raise TypeError("Cannot serialize non-str key {!r}".format(s)) + else: + return str(s) + + +cdef void _safe_header(str string) except *: + if "\r" in string or "\n" in string: + raise ValueError( + "Newline or carriage return character detected in HTTP status message or " + "header. This is a potential security issue."
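_write_utf8 above hand-rolls the UTF-8 byte layout: one byte below 0x80, two below 0x800, three below 0x10000, four up to 0x10FFFF, with lone surrogates and out-of-range values silently dropped. The same layout in plain Python, with U+20AC (the euro sign) as a worked example; the function name is illustrative:

def encode_utf8(cp: int) -> bytes:
    """Encode a single code point the way _write_utf8 does."""
    if cp < 0x80:
        return bytes([cp])
    if cp < 0x800:
        return bytes([0xC0 | (cp >> 6), 0x80 | (cp & 0x3F)])
    if 0xD800 <= cp <= 0xDFFF or cp > 0x10FFFF:
        return b""  # surrogates and overlarge values are skipped, as above
    if cp < 0x10000:
        return bytes([0xE0 | (cp >> 12),
                      0x80 | ((cp >> 6) & 0x3F),
                      0x80 | (cp & 0x3F)])
    return bytes([0xF0 | (cp >> 18),
                  0x80 | ((cp >> 12) & 0x3F),
                  0x80 | ((cp >> 6) & 0x3F),
                  0x80 | (cp & 0x3F)])

assert encode_utf8(0x20AC) == "\u20ac".encode("utf-8") == b"\xe2\x82\xac"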
+ ) + + +def _serialize_headers(str status_line, headers): + cdef Writer writer + cdef object key + cdef object val + cdef bytes ret + + _init_writer(&writer) + + for key, val in headers.items(): + _safe_header(to_str(key)) + _safe_header(to_str(val)) + + try: + if _write_str(&writer, status_line) < 0: + raise + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + for key, val in headers.items(): + if _write_str(&writer, to_str(key)) < 0: + raise + if _write_byte(&writer, b':') < 0: + raise + if _write_byte(&writer, b' ') < 0: + raise + if _write_str(&writer, to_str(val)) < 0: + raise + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + return PyBytes_FromStringAndSize(writer.buf, writer.pos) + finally: + _release_writer(&writer) diff --git a/venv/lib/python3.10/site-packages/aiohttp/_websocket.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/aiohttp/_websocket.cpython-310-x86_64-linux-gnu.so new file mode 100755 index 0000000..c4d3eaf Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiohttp/_websocket.cpython-310-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.10/site-packages/aiohttp/_websocket.pyx b/venv/lib/python3.10/site-packages/aiohttp/_websocket.pyx new file mode 100644 index 0000000..94318d2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/_websocket.pyx @@ -0,0 +1,56 @@ +from cpython cimport PyBytes_AsString + + +#from cpython cimport PyByteArray_AsString # cython still not exports that +cdef extern from "Python.h": + char* PyByteArray_AsString(bytearray ba) except NULL + +from libc.stdint cimport uint32_t, uint64_t, uintmax_t + + +def _websocket_mask_cython(object mask, object data): + """Note, this function mutates its `data` argument + """ + cdef: + Py_ssize_t data_len, i + # bit operations on signed integers are implementation-specific + unsigned char * in_buf + const unsigned char * mask_buf + uint32_t uint32_msk + uint64_t uint64_msk + + assert len(mask) == 4 + + if not isinstance(mask, bytes): + mask = bytes(mask) + + if isinstance(data, bytearray): + data = <bytearray>data + else: + data = bytearray(data) + + data_len = len(data) + in_buf = <unsigned char*>PyByteArray_AsString(data) + mask_buf = <const unsigned char*>PyBytes_AsString(mask) + uint32_msk = (<uint32_t*>mask_buf)[0] + + # TODO: align in_data ptr to achieve even faster speeds + # does it need in python ?!
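_serialize_headers above emits the classic HTTP/1.x wire format: the status line, one "Name: value" pair per line, CRLF line endings, and an empty line closing the block, with _safe_header rejecting CR or LF inside names and values to block header injection. The same format rendered with ordinary Python strings, as a sketch:

def serialize_headers(status_line: str, headers: dict) -> bytes:
    """Build a header block the way _serialize_headers does, minus the C buffer."""
    for key, val in headers.items():
        for s in (str(key), str(val)):
            if "\r" in s or "\n" in s:
                raise ValueError("Newline or carriage return in header")
    lines = [status_line] + [f"{key}: {val}" for key, val in headers.items()]
    return ("\r\n".join(lines) + "\r\n\r\n").encode("utf-8")

assert (serialize_headers("GET / HTTP/1.1", {"Host": "example.org"})
        == b"GET / HTTP/1.1\r\nHost: example.org\r\n\r\n")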
malloc() always aligns to sizeof(long) bytes + + if sizeof(size_t) >= 8: + uint64_msk = uint32_msk + uint64_msk = (uint64_msk << 32) | uint32_msk + + while data_len >= 8: + (<uint64_t*>in_buf)[0] ^= uint64_msk + in_buf += 8 + data_len -= 8 + + + while data_len >= 4: + (<uint32_t*>in_buf)[0] ^= uint32_msk + in_buf += 4 + data_len -= 4 + + for i in range(0, data_len): + in_buf[i] ^= mask_buf[i] diff --git a/venv/lib/python3.10/site-packages/aiohttp/abc.py b/venv/lib/python3.10/site-packages/aiohttp/abc.py new file mode 100644 index 0000000..44a3bda --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/abc.py @@ -0,0 +1,207 @@ +import asyncio +import logging +from abc import ABC, abstractmethod +from collections.abc import Sized +from http.cookies import BaseCookie, Morsel +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Dict, + Generator, + Iterable, + List, + Optional, + Tuple, +) + +from multidict import CIMultiDict +from yarl import URL + +from .helpers import get_running_loop +from .typedefs import LooseCookies + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application + from .web_exceptions import HTTPException + from .web_request import BaseRequest, Request + from .web_response import StreamResponse +else: + BaseRequest = Request = Application = StreamResponse = None + HTTPException = None + + +class AbstractRouter(ABC): + def __init__(self) -> None: + self._frozen = False + + def post_init(self, app: Application) -> None: + """Post init stage. + + Not an abstract method for sake of backward compatibility, + but if the router wants to be aware of the application + it can override this. + """ + + @property + def frozen(self) -> bool: + return self._frozen + + def freeze(self) -> None: + """Freeze router.""" + self._frozen = True + + @abstractmethod + async def resolve(self, request: Request) -> "AbstractMatchInfo": + """Return MATCH_INFO for given request""" + + +class AbstractMatchInfo(ABC): + @property # pragma: no branch + @abstractmethod + def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]: + """Execute matched request handler""" + + @property + @abstractmethod + def expect_handler(self) -> Callable[[Request], Awaitable[None]]: + """Expect handler for 100-continue processing""" + + @property # pragma: no branch + @abstractmethod + def http_exception(self) -> Optional[HTTPException]: + """HTTPException instance raised on router's resolving, or None""" + + @abstractmethod # pragma: no branch + def get_info(self) -> Dict[str, Any]: + """Return a dict with additional info useful for introspection""" + + @property # pragma: no branch + @abstractmethod + def apps(self) -> Tuple[Application, ...]: + """Stack of nested applications. + + Top level application is left-most element. + + """ + + @abstractmethod + def add_app(self, app: Application) -> None: + """Add application to the nested apps stack.""" + + @abstractmethod + def freeze(self) -> None: + """Freeze the match info. + + The method is called after route resolution. + + After the call .add_app() is forbidden.
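The _websocket_mask_cython routine above (in _websocket.pyx) widens the 4-byte mask to 32 and then 64 bits so it can XOR eight payload bytes per loop iteration; semantically it is just the cyclic XOR mask RFC 6455 prescribes for client frames. A reference sketch of those semantics, with an illustrative name:

def websocket_mask(mask: bytes, data: bytearray) -> None:
    """XOR-mask `data` in place with the 4-byte `mask`, per RFC 6455."""
    assert len(mask) == 4
    for i in range(len(data)):
        data[i] ^= mask[i % 4]

payload = bytearray(b"hello")
websocket_mask(b"\x01\x02\x03\x04", payload)
websocket_mask(b"\x01\x02\x03\x04", payload)  # masking twice restores the input
assert payload == bytearray(b"hello")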
+ + """ + + +class AbstractView(ABC): + """Abstract class based view.""" + + def __init__(self, request: Request) -> None: + self._request = request + + @property + def request(self) -> Request: + """Request instance.""" + return self._request + + @abstractmethod + def __await__(self) -> Generator[Any, None, StreamResponse]: + """Execute the view handler.""" + + +class AbstractResolver(ABC): + """Abstract DNS resolver.""" + + @abstractmethod + async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]: + """Return IP address for given hostname""" + + @abstractmethod + async def close(self) -> None: + """Release resolver""" + + +if TYPE_CHECKING: # pragma: no cover + IterableBase = Iterable[Morsel[str]] +else: + IterableBase = Iterable + + +ClearCookiePredicate = Callable[["Morsel[str]"], bool] + + +class AbstractCookieJar(Sized, IterableBase): + """Abstract Cookie Jar.""" + + def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + self._loop = get_running_loop(loop) + + @abstractmethod + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + """Clear all cookies if no predicate is passed.""" + + @abstractmethod + def clear_domain(self, domain: str) -> None: + """Clear all cookies for domain and all subdomains.""" + + @abstractmethod + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + """Update cookies.""" + + @abstractmethod + def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": + """Return the jar's cookies filtered by their attributes.""" + + +class AbstractStreamWriter(ABC): + """Abstract stream writer.""" + + buffer_size = 0 + output_size = 0 + length: Optional[int] = 0 + + @abstractmethod + async def write(self, chunk: bytes) -> None: + """Write chunk into stream.""" + + @abstractmethod + async def write_eof(self, chunk: bytes = b"") -> None: + """Write last chunk.""" + + @abstractmethod + async def drain(self) -> None: + """Flush the write buffer.""" + + @abstractmethod + def enable_compression(self, encoding: str = "deflate") -> None: + """Enable HTTP body compression""" + + @abstractmethod + def enable_chunking(self) -> None: + """Enable HTTP chunked mode""" + + @abstractmethod + async def write_headers( + self, status_line: str, headers: "CIMultiDict[str]" + ) -> None: + """Write HTTP headers""" + + +class AbstractAccessLogger(ABC): + """Abstract writer to access log.""" + + def __init__(self, logger: logging.Logger, log_format: str) -> None: + self.logger = logger + self.log_format = log_format + + @abstractmethod + def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: + """Emit log to logger.""" diff --git a/venv/lib/python3.10/site-packages/aiohttp/base_protocol.py b/venv/lib/python3.10/site-packages/aiohttp/base_protocol.py new file mode 100644 index 0000000..4c9f0a7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/base_protocol.py @@ -0,0 +1,90 @@ +import asyncio +from typing import Optional, cast + +from .tcp_helpers import tcp_nodelay + + +class BaseProtocol(asyncio.Protocol): + __slots__ = ( + "_loop", + "_paused", + "_drain_waiter", + "_connection_lost", + "_reading_paused", + "transport", + ) + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop: asyncio.AbstractEventLoop = loop + self._paused = False + self._drain_waiter: Optional[asyncio.Future[None]] = None + self._reading_paused = False + + self.transport: Optional[asyncio.Transport] = None + + @property + def connected(self) -> 
bool: + """Return True if the connection is open.""" + return self.transport is not None + + def pause_writing(self) -> None: + assert not self._paused + self._paused = True + + def resume_writing(self) -> None: + assert self._paused + self._paused = False + + waiter = self._drain_waiter + if waiter is not None: + self._drain_waiter = None + if not waiter.done(): + waiter.set_result(None) + + def pause_reading(self) -> None: + if not self._reading_paused and self.transport is not None: + try: + self.transport.pause_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = True + + def resume_reading(self) -> None: + if self._reading_paused and self.transport is not None: + try: + self.transport.resume_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + tr = cast(asyncio.Transport, transport) + tcp_nodelay(tr, True) + self.transport = tr + + def connection_lost(self, exc: Optional[BaseException]) -> None: + # Wake up the writer if currently paused. + self.transport = None + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + waiter.set_exception(exc) + + async def _drain_helper(self) -> None: + if not self.connected: + raise ConnectionResetError("Connection lost") + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + waiter = self._loop.create_future() + self._drain_waiter = waiter + await asyncio.shield(waiter) diff --git a/venv/lib/python3.10/site-packages/aiohttp/client.py b/venv/lib/python3.10/site-packages/aiohttp/client.py new file mode 100644 index 0000000..0d0f4c1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/client.py @@ -0,0 +1,1305 @@ +"""HTTP Client for asyncio.""" + +import asyncio +import base64 +import hashlib +import json +import os +import sys +import traceback +import warnings +from contextlib import suppress +from types import SimpleNamespace, TracebackType +from typing import ( + Any, + Awaitable, + Callable, + Coroutine, + FrozenSet, + Generator, + Generic, + Iterable, + List, + Mapping, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +import attr +from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr +from yarl import URL + +from . 
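BaseProtocol above implements asyncio's write backpressure: pause_writing() flips a flag when the transport buffer fills, _drain_helper() parks writers on a shared future, and resume_writing() or connection_lost() releases them. The core of that pattern, reduced to a standalone, illustrative class:

import asyncio
from typing import Optional

class DrainGate:
    """Writers await drain(); the transport side pauses and resumes the gate."""

    def __init__(self) -> None:
        self._paused = False
        self._waiter: Optional["asyncio.Future[None]"] = None

    def pause_writing(self) -> None:
        self._paused = True

    def resume_writing(self) -> None:
        self._paused = False
        if self._waiter is not None and not self._waiter.done():
            self._waiter.set_result(None)
        self._waiter = None

    async def drain(self) -> None:
        if not self._paused:
            return
        if self._waiter is None:  # all paused writers share one future
            self._waiter = asyncio.get_running_loop().create_future()
        await asyncio.shield(self._waiter)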
import hdrs, http, payload +from .abc import AbstractCookieJar +from .client_exceptions import ( + ClientConnectionError as ClientConnectionError, + ClientConnectorCertificateError as ClientConnectorCertificateError, + ClientConnectorError as ClientConnectorError, + ClientConnectorSSLError as ClientConnectorSSLError, + ClientError as ClientError, + ClientHttpProxyError as ClientHttpProxyError, + ClientOSError as ClientOSError, + ClientPayloadError as ClientPayloadError, + ClientProxyConnectionError as ClientProxyConnectionError, + ClientResponseError as ClientResponseError, + ClientSSLError as ClientSSLError, + ContentTypeError as ContentTypeError, + InvalidURL as InvalidURL, + ServerConnectionError as ServerConnectionError, + ServerDisconnectedError as ServerDisconnectedError, + ServerFingerprintMismatch as ServerFingerprintMismatch, + ServerTimeoutError as ServerTimeoutError, + TooManyRedirects as TooManyRedirects, + WSServerHandshakeError as WSServerHandshakeError, +) +from .client_reqrep import ( + ClientRequest as ClientRequest, + ClientResponse as ClientResponse, + Fingerprint as Fingerprint, + RequestInfo as RequestInfo, + _merge_ssl_params, +) +from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse +from .connector import ( + BaseConnector as BaseConnector, + NamedPipeConnector as NamedPipeConnector, + TCPConnector as TCPConnector, + UnixConnector as UnixConnector, +) +from .cookiejar import CookieJar +from .helpers import ( + DEBUG, + PY_36, + BasicAuth, + TimeoutHandle, + ceil_timeout, + get_env_proxy_for_url, + get_running_loop, + sentinel, + strip_auth_from_url, +) +from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter +from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse +from .streams import FlowControlDataQueue +from .tracing import Trace, TraceConfig +from .typedefs import Final, JSONEncoder, LooseCookies, LooseHeaders, StrOrURL + +__all__ = ( + # client_exceptions + "ClientConnectionError", + "ClientConnectorCertificateError", + "ClientConnectorError", + "ClientConnectorSSLError", + "ClientError", + "ClientHttpProxyError", + "ClientOSError", + "ClientPayloadError", + "ClientProxyConnectionError", + "ClientResponseError", + "ClientSSLError", + "ContentTypeError", + "InvalidURL", + "ServerConnectionError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ServerTimeoutError", + "TooManyRedirects", + "WSServerHandshakeError", + # client_reqrep + "ClientRequest", + "ClientResponse", + "Fingerprint", + "RequestInfo", + # connector + "BaseConnector", + "TCPConnector", + "UnixConnector", + "NamedPipeConnector", + # client_ws + "ClientWebSocketResponse", + # client + "ClientSession", + "ClientTimeout", + "request", +) + + +try: + from ssl import SSLContext +except ImportError: # pragma: no cover + SSLContext = object # type: ignore[misc,assignment] + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ClientTimeout: + total: Optional[float] = None + connect: Optional[float] = None + sock_read: Optional[float] = None + sock_connect: Optional[float] = None + + # pool_queue_timeout: Optional[float] = None + # dns_resolution_timeout: Optional[float] = None + # socket_connect_timeout: Optional[float] = None + # connection_acquiring_timeout: Optional[float] = None + # new_connection_timeout: Optional[float] = None + # http_header_timeout: Optional[float] = None + # response_body_timeout: Optional[float] = None + + # to create a timeout specific for a single request, either + # - create a completely 
new one to overwrite the default + # - or use http://www.attrs.org/en/stable/api.html#attr.evolve + # to overwrite the defaults + + +# 5 Minute default read timeout +DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60) + +_RetType = TypeVar("_RetType") + + +class ClientSession: + """First-class interface for making HTTP requests.""" + + ATTRS = frozenset( + [ + "_base_url", + "_source_traceback", + "_connector", + "requote_redirect_url", + "_loop", + "_cookie_jar", + "_connector_owner", + "_default_auth", + "_version", + "_json_serialize", + "_requote_redirect_url", + "_timeout", + "_raise_for_status", + "_auto_decompress", + "_trust_env", + "_default_headers", + "_skip_auto_headers", + "_request_class", + "_response_class", + "_ws_response_class", + "_trace_configs", + "_read_bufsize", + ] + ) + + _source_traceback = None # type: Optional[traceback.StackSummary] + _connector = None # type: Optional[BaseConnector] + + def __init__( + self, + base_url: Optional[StrOrURL] = None, + *, + connector: Optional[BaseConnector] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + cookies: Optional[LooseCookies] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + json_serialize: JSONEncoder = json.dumps, + request_class: Type[ClientRequest] = ClientRequest, + response_class: Type[ClientResponse] = ClientResponse, + ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse, + version: HttpVersion = http.HttpVersion11, + cookie_jar: Optional[AbstractCookieJar] = None, + connector_owner: bool = True, + raise_for_status: bool = False, + read_timeout: Union[float, object] = sentinel, + conn_timeout: Optional[float] = None, + timeout: Union[object, ClientTimeout] = sentinel, + auto_decompress: bool = True, + trust_env: bool = False, + requote_redirect_url: bool = True, + trace_configs: Optional[List[TraceConfig]] = None, + read_bufsize: int = 2**16, + ) -> None: + if loop is None: + if connector is not None: + loop = connector._loop + + loop = get_running_loop(loop) + + if base_url is None or isinstance(base_url, URL): + self._base_url: Optional[URL] = base_url + else: + self._base_url = URL(base_url) + assert ( + self._base_url.origin() == self._base_url + ), "Only absolute URLs without path part are supported" + + if connector is None: + connector = TCPConnector(loop=loop) + + if connector._loop is not loop: + raise RuntimeError("Session and connector has to use same event loop") + + self._loop = loop + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + if cookie_jar is None: + cookie_jar = CookieJar(loop=loop) + self._cookie_jar = cookie_jar + + if cookies is not None: + self._cookie_jar.update_cookies(cookies) + + self._connector = connector + self._connector_owner = connector_owner + self._default_auth = auth + self._version = version + self._json_serialize = json_serialize + if timeout is sentinel: + self._timeout = DEFAULT_TIMEOUT + if read_timeout is not sentinel: + warnings.warn( + "read_timeout is deprecated, " "use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + self._timeout = attr.evolve(self._timeout, total=read_timeout) + if conn_timeout is not None: + self._timeout = attr.evolve(self._timeout, connect=conn_timeout) + warnings.warn( + "conn_timeout is deprecated, " "use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + else: + self._timeout = timeout # type: 
ignore[assignment] + if read_timeout is not sentinel: + raise ValueError( + "read_timeout and timeout parameters " + "conflict, please setup " + "timeout.read" + ) + if conn_timeout is not None: + raise ValueError( + "conn_timeout and timeout parameters " + "conflict, please setup " + "timeout.connect" + ) + self._raise_for_status = raise_for_status + self._auto_decompress = auto_decompress + self._trust_env = trust_env + self._requote_redirect_url = requote_redirect_url + self._read_bufsize = read_bufsize + + # Convert to list of tuples + if headers: + real_headers: CIMultiDict[str] = CIMultiDict(headers) + else: + real_headers = CIMultiDict() + self._default_headers: CIMultiDict[str] = real_headers + if skip_auto_headers is not None: + self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers) + else: + self._skip_auto_headers = frozenset() + + self._request_class = request_class + self._response_class = response_class + self._ws_response_class = ws_response_class + + self._trace_configs = trace_configs or [] + for trace_config in self._trace_configs: + trace_config.freeze() + + def __init_subclass__(cls: Type["ClientSession"]) -> None: + warnings.warn( + "Inheritance class {} from ClientSession " + "is discouraged".format(cls.__name__), + DeprecationWarning, + stacklevel=2, + ) + + if DEBUG: + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom ClientSession.{} attribute " + "is discouraged".format(name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + def __del__(self, _warnings: Any = warnings) -> None: + if not self.closed: + if PY_36: + kwargs = {"source": self} + else: + kwargs = {} + _warnings.warn( + f"Unclosed client session {self!r}", ResourceWarning, **kwargs + ) + context = {"client_session": self, "message": "Unclosed client session"} + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def request( + self, method: str, url: StrOrURL, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP request.""" + return _RequestContextManager(self._request(method, url, **kwargs)) + + def _build_url(self, str_or_url: StrOrURL) -> URL: + url = URL(str_or_url) + if self._base_url is None: + return url + else: + assert not url.is_absolute() and url.path.startswith("/") + return self._base_url.join(url) + + async def _request( + self, + method: str, + str_or_url: StrOrURL, + *, + params: Optional[Mapping[str, str]] = None, + data: Any = None, + json: Any = None, + cookies: Optional[LooseCookies] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + allow_redirects: bool = True, + max_redirects: int = 10, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + raise_for_status: Optional[bool] = None, + read_until_eof: bool = True, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + timeout: Union[ClientTimeout, object] = sentinel, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None, + proxy_headers: Optional[LooseHeaders] = None, + trace_request_ctx: Optional[SimpleNamespace] = None, + read_bufsize: Optional[int] = None, + ) -> ClientResponse: + + # NOTE: timeout clamps existing connect and read 
timeouts. We cannot + # set the default to None because we need to detect if the user wants + # to use the existing timeouts by setting timeout to None. + + if self.closed: + raise RuntimeError("Session is closed") + + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + if data is not None and json is not None: + raise ValueError( + "data and json parameters can not be used at the same time" + ) + elif json is not None: + data = payload.JsonPayload(json, dumps=self._json_serialize) + + if not isinstance(chunked, bool) and chunked is not None: + warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + + redirects = 0 + history = [] + version = self._version + + # Merge with default headers and transform to CIMultiDict + headers = self._prepare_headers(headers) + proxy_headers = self._prepare_headers(proxy_headers) + + try: + url = self._build_url(str_or_url) + except ValueError as e: + raise InvalidURL(str_or_url) from e + + skip_headers = set(self._skip_auto_headers) + if skip_auto_headers is not None: + for i in skip_auto_headers: + skip_headers.add(istr(i)) + + if proxy is not None: + try: + proxy = URL(proxy) + except ValueError as e: + raise InvalidURL(proxy) from e + + if timeout is sentinel: + real_timeout: ClientTimeout = self._timeout + else: + if not isinstance(timeout, ClientTimeout): + real_timeout = ClientTimeout(total=timeout) # type: ignore[arg-type] + else: + real_timeout = timeout + # timeout is cumulative for all request operations + # (request, redirects, responses, data consuming) + tm = TimeoutHandle(self._loop, real_timeout.total) + handle = tm.start() + + if read_bufsize is None: + read_bufsize = self._read_bufsize + + traces = [ + Trace( + self, + trace_config, + trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx), + ) + for trace_config in self._trace_configs + ] + + for trace in traces: + await trace.send_request_start(method, url.update_query(params), headers) + + timer = tm.timer() + try: + with timer: + while True: + url, auth_from_url = strip_auth_from_url(url) + if auth and auth_from_url: + raise ValueError( + "Cannot combine AUTH argument with " + "credentials encoded in URL" + ) + + if auth is None: + auth = auth_from_url + if auth is None: + auth = self._default_auth + # It would be confusing if we support explicit + # Authorization header with auth argument + if ( + headers is not None + and auth is not None + and hdrs.AUTHORIZATION in headers + ): + raise ValueError( + "Cannot combine AUTHORIZATION header " + "with AUTH argument or credentials " + "encoded in URL" + ) + + all_cookies = self._cookie_jar.filter_cookies(url) + + if cookies is not None: + tmp_cookie_jar = CookieJar() + tmp_cookie_jar.update_cookies(cookies) + req_cookies = tmp_cookie_jar.filter_cookies(url) + if req_cookies: + all_cookies.load(req_cookies) + + if proxy is not None: + proxy = URL(proxy) + elif self._trust_env: + with suppress(LookupError): + proxy, proxy_auth = get_env_proxy_for_url(url) + + req = self._request_class( + method, + url, + params=params, + headers=headers, + skip_auto_headers=skip_headers, + data=data, + cookies=all_cookies, + auth=auth, + version=version, + compress=compress, + chunked=chunked, + expect100=expect100, + loop=self._loop, + response_class=self._response_class, + proxy=proxy, + proxy_auth=proxy_auth, + timer=timer, + session=self, + ssl=ssl, + proxy_headers=proxy_headers, + traces=traces, + ) + + # connection timeout + try: + async with ceil_timeout(real_timeout.connect): + assert self._connector is not 
None + conn = await self._connector.connect( + req, traces=traces, timeout=real_timeout + ) + except asyncio.TimeoutError as exc: + raise ServerTimeoutError( + "Connection timeout " "to host {}".format(url) + ) from exc + + assert conn.transport is not None + + assert conn.protocol is not None + conn.protocol.set_response_params( + timer=timer, + skip_payload=method.upper() == "HEAD", + read_until_eof=read_until_eof, + auto_decompress=self._auto_decompress, + read_timeout=real_timeout.sock_read, + read_bufsize=read_bufsize, + ) + + try: + try: + resp = await req.send(conn) + try: + await resp.start(conn) + except BaseException: + resp.close() + raise + except BaseException: + conn.close() + raise + except ClientError: + raise + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise ClientOSError(*exc.args) from exc + + self._cookie_jar.update_cookies(resp.cookies, resp.url) + + # redirects + if resp.status in (301, 302, 303, 307, 308) and allow_redirects: + + for trace in traces: + await trace.send_request_redirect( + method, url.update_query(params), headers, resp + ) + + redirects += 1 + history.append(resp) + if max_redirects and redirects >= max_redirects: + resp.close() + raise TooManyRedirects( + history[0].request_info, tuple(history) + ) + + # For 301 and 302, mimic IE, now changed in RFC + # https://github.com/kennethreitz/requests/pull/269 + if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or ( + resp.status in (301, 302) and resp.method == hdrs.METH_POST + ): + method = hdrs.METH_GET + data = None + if headers.get(hdrs.CONTENT_LENGTH): + headers.pop(hdrs.CONTENT_LENGTH) + + r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get( + hdrs.URI + ) + if r_url is None: + # see github.com/aio-libs/aiohttp/issues/2022 + break + else: + # reading from correct redirection + # response is forbidden + resp.release() + + try: + parsed_url = URL( + r_url, encoded=not self._requote_redirect_url + ) + + except ValueError as e: + raise InvalidURL(r_url) from e + + scheme = parsed_url.scheme + if scheme not in ("http", "https", ""): + resp.close() + raise ValueError("Can redirect only to http or https") + elif not scheme: + parsed_url = url.join(parsed_url) + + if url.origin() != parsed_url.origin(): + auth = None + headers.pop(hdrs.AUTHORIZATION, None) + + url = parsed_url + params = None + resp.release() + continue + + break + + # check response status + if raise_for_status is None: + raise_for_status = self._raise_for_status + if raise_for_status: + resp.raise_for_status() + + # register connection + if handle is not None: + if resp.connection is not None: + resp.connection.add_callback(handle.cancel) + else: + handle.cancel() + + resp._history = tuple(history) + + for trace in traces: + await trace.send_request_end( + method, url.update_query(params), headers, resp + ) + return resp + + except BaseException as e: + # cleanup timer + tm.close() + if handle: + handle.cancel() + handle = None + + for trace in traces: + await trace.send_request_exception( + method, url.update_query(params), headers, e + ) + raise + + def ws_connect( + self, + url: StrOrURL, + *, + method: str = hdrs.METH_GET, + protocols: Iterable[str] = (), + timeout: float = 10.0, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + auth: Optional[BasicAuth] = None, + origin: Optional[str] = None, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + 
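The redirect loop above follows up to max_redirects hops, drops Authorization when the redirect leaves the original origin, and rewrites the method for the legacy browser-compatible cases: 303 (except after HEAD), and 301/302 after POST, are retried as GET with the body removed. That rewrite rule in isolation; the helper name is hypothetical:

def redirected_method(status: int, method: str) -> str:
    """Method to use when following a redirect, mirroring the loop above."""
    if status == 303 and method != "HEAD":
        return "GET"
    if status in (301, 302) and method == "POST":
        return "GET"
    return method

assert redirected_method(303, "PUT") == "GET"
assert redirected_method(302, "POST") == "GET"
assert redirected_method(307, "POST") == "POST"  # 307/308 preserve the method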
proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + ssl: Union[SSLContext, bool, None, Fingerprint] = None, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + proxy_headers: Optional[LooseHeaders] = None, + compress: int = 0, + max_msg_size: int = 4 * 1024 * 1024, + ) -> "_WSRequestContextManager": + """Initiate websocket connection.""" + return _WSRequestContextManager( + self._ws_connect( + url, + method=method, + protocols=protocols, + timeout=timeout, + receive_timeout=receive_timeout, + autoclose=autoclose, + autoping=autoping, + heartbeat=heartbeat, + auth=auth, + origin=origin, + params=params, + headers=headers, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + verify_ssl=verify_ssl, + fingerprint=fingerprint, + ssl_context=ssl_context, + proxy_headers=proxy_headers, + compress=compress, + max_msg_size=max_msg_size, + ) + ) + + async def _ws_connect( + self, + url: StrOrURL, + *, + method: str = hdrs.METH_GET, + protocols: Iterable[str] = (), + timeout: float = 10.0, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + auth: Optional[BasicAuth] = None, + origin: Optional[str] = None, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + ssl: Union[SSLContext, bool, None, Fingerprint] = None, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + proxy_headers: Optional[LooseHeaders] = None, + compress: int = 0, + max_msg_size: int = 4 * 1024 * 1024, + ) -> ClientWebSocketResponse: + + if headers is None: + real_headers: CIMultiDict[str] = CIMultiDict() + else: + real_headers = CIMultiDict(headers) + + default_headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_VERSION: "13", + } + + for key, value in default_headers.items(): + real_headers.setdefault(key, value) + + sec_key = base64.b64encode(os.urandom(16)) + real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode() + + if protocols: + real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols) + if origin is not None: + real_headers[hdrs.ORIGIN] = origin + if compress: + extstr = ws_ext_gen(compress=compress) + real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr + + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + # send request + resp = await self.request( + method, + url, + params=params, + headers=real_headers, + read_until_eof=False, + auth=auth, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + proxy_headers=proxy_headers, + ) + + try: + # check handshake + if resp.status != 101: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid response status", + status=resp.status, + headers=resp.headers, + ) + + if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket": + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid upgrade header", + status=resp.status, + headers=resp.headers, + ) + + if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade": + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid connection header", + status=resp.status, + headers=resp.headers, + ) + + # key calculation + r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "") + match = base64.b64encode(hashlib.sha1(sec_key + 
WS_KEY).digest()).decode() + if r_key != match: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid challenge response", + status=resp.status, + headers=resp.headers, + ) + + # websocket protocol + protocol = None + if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: + resp_protocols = [ + proto.strip() + for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") + ] + + for proto in resp_protocols: + if proto in protocols: + protocol = proto + break + + # websocket compress + notakeover = False + if compress: + compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) + if compress_hdrs: + try: + compress, notakeover = ws_ext_parse(compress_hdrs) + except WSHandshakeError as exc: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message=exc.args[0], + status=resp.status, + headers=resp.headers, + ) from exc + else: + compress = 0 + notakeover = False + + conn = resp.connection + assert conn is not None + conn_proto = conn.protocol + assert conn_proto is not None + transport = conn.transport + assert transport is not None + reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue( + conn_proto, 2**16, loop=self._loop + ) + conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader) + writer = WebSocketWriter( + conn_proto, + transport, + use_mask=True, + compress=compress, + notakeover=notakeover, + ) + except BaseException: + resp.close() + raise + else: + return self._ws_response_class( + reader, + writer, + protocol, + resp, + timeout, + autoclose, + autoping, + self._loop, + receive_timeout=receive_timeout, + heartbeat=heartbeat, + compress=compress, + client_notakeover=notakeover, + ) + + def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]": + """Add default headers and transform it to CIMultiDict""" + # Convert headers to MultiDict + result = CIMultiDict(self._default_headers) + if headers: + if not isinstance(headers, (MultiDictProxy, MultiDict)): + headers = CIMultiDict(headers) + added_names: Set[str] = set() + for key, value in headers.items(): + if key in added_names: + result.add(key, value) + else: + result[key] = value + added_names.add(key) + return result + + def get( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP GET request.""" + return _RequestContextManager( + self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs) + ) + + def options( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP OPTIONS request.""" + return _RequestContextManager( + self._request( + hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def head( + self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP HEAD request.""" + return _RequestContextManager( + self._request( + hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def post( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP POST request.""" + return _RequestContextManager( + self._request(hdrs.METH_POST, url, data=data, **kwargs) + ) + + def put( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PUT request.""" + return _RequestContextManager( + self._request(hdrs.METH_PUT, url, data=data, **kwargs) + ) + + def patch( + self, url: 
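The handshake check above recomputes the RFC 6455 accept token: the server must echo base64(sha1(client_key + GUID)) in Sec-WebSocket-Accept, where WS_KEY is the fixed GUID shown below. A sketch using the worked example from RFC 6455 section 1.3:

import base64
import hashlib

WS_GUID = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"  # aiohttp's WS_KEY

def ws_accept(sec_websocket_key: bytes) -> str:
    """Compute the expected Sec-WebSocket-Accept value for a client key."""
    return base64.b64encode(
        hashlib.sha1(sec_websocket_key + WS_GUID).digest()
    ).decode()

assert ws_accept(b"dGhlIHNhbXBsZSBub25jZQ==") == "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="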
StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, url, data=data, **kwargs) + ) + + def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": + """Perform HTTP DELETE request.""" + return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs)) + + async def close(self) -> None: + """Close underlying connector. + + Release all acquired resources. + """ + if not self.closed: + if self._connector is not None and self._connector_owner: + await self._connector.close() + self._connector = None + + @property + def closed(self) -> bool: + """Is client session closed. + + A readonly property. + """ + return self._connector is None or self._connector.closed + + @property + def connector(self) -> Optional[BaseConnector]: + """Connector instance used for the session.""" + return self._connector + + @property + def cookie_jar(self) -> AbstractCookieJar: + """The session cookies.""" + return self._cookie_jar + + @property + def version(self) -> Tuple[int, int]: + """The session HTTP protocol version.""" + return self._version + + @property + def requote_redirect_url(self) -> bool: + """Do URL requoting on redirection handling.""" + return self._requote_redirect_url + + @requote_redirect_url.setter + def requote_redirect_url(self, val: bool) -> None: + """Do URL requoting on redirection handling.""" + warnings.warn( + "session.requote_redirect_url modification " "is deprecated #2778", + DeprecationWarning, + stacklevel=2, + ) + self._requote_redirect_url = val + + @property + def loop(self) -> asyncio.AbstractEventLoop: + """Session's loop.""" + warnings.warn( + "client.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + @property + def timeout(self) -> ClientTimeout: + """Timeout for the session.""" + return self._timeout + + @property + def headers(self) -> "CIMultiDict[str]": + """The default headers of the client session.""" + return self._default_headers + + @property + def skip_auto_headers(self) -> FrozenSet[istr]: + """Headers for which autogeneration should be skipped""" + return self._skip_auto_headers + + @property + def auth(self) -> Optional[BasicAuth]: + """An object that represents HTTP Basic Authorization""" + return self._default_auth + + @property + def json_serialize(self) -> JSONEncoder: + """Json serializer callable""" + return self._json_serialize + + @property + def connector_owner(self) -> bool: + """Should connector be closed on session closing""" + return self._connector_owner + + @property + def raise_for_status( + self, + ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]: + """Should `ClientResponse.raise_for_status()` be called for each response.""" + return self._raise_for_status + + @property + def auto_decompress(self) -> bool: + """Should the body response be automatically decompressed.""" + return self._auto_decompress + + @property + def trust_env(self) -> bool: + """ + Should proxies information from environment or netrc be trusted. + + Information is from HTTP_PROXY / HTTPS_PROXY environment variables + or ~/.netrc file if present. + """ + return self._trust_env + + @property + def trace_configs(self) -> List[TraceConfig]: + """A list of TraceConfig instances used for client tracing""" + return self._trace_configs + + def detach(self) -> None: + """Detach connector from session without closing the former. + + Session is switched to closed state anyway. 
+ """ + self._connector = None + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "ClientSession": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.close() + + +class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]): + + __slots__ = ("_coro", "_resp") + + def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: + self._coro = coro + + def send(self, arg: None) -> "asyncio.Future[Any]": + return self._coro.send(arg) + + def throw(self, arg: BaseException) -> None: # type: ignore[arg-type,override] + self._coro.throw(arg) + + def close(self) -> None: + return self._coro.close() + + def __await__(self) -> Generator[Any, None, _RetType]: + ret = self._coro.__await__() + return ret + + def __iter__(self) -> Generator[Any, None, _RetType]: + return self.__await__() + + async def __aenter__(self) -> _RetType: + self._resp = await self._coro + return self._resp + + +class _RequestContextManager(_BaseRequestContextManager[ClientResponse]): + __slots__ = () + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + # We're basing behavior on the exception as it can be caused by + # user code unrelated to the status of the connection. If you + # would like to close a connection you must do that + # explicitly. Otherwise connection error handling should kick in + # and close/recycle the connection as required. 
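+        # (Editor's note: this context-manager machinery is what backs the
+        # familiar ``async with`` request pattern. A minimal usage sketch,
+        # assuming a reachable URL; ``fetch`` is an illustrative name:
+        #
+        #     import aiohttp
+        #
+        #     async def fetch() -> str:
+        #         async with aiohttp.ClientSession() as session:
+        #             async with session.get("https://example.org") as resp:
+        #                 return await resp.text()
+        #
+        # Leaving the inner block releases the connection back to the pool
+        # rather than closing it, as the comment above explains.)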
+ self._resp.release() + + +class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]): + __slots__ = () + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self._resp.close() + + +class _SessionRequestContextManager: + + __slots__ = ("_coro", "_resp", "_session") + + def __init__( + self, + coro: Coroutine["asyncio.Future[Any]", None, ClientResponse], + session: ClientSession, + ) -> None: + self._coro = coro + self._resp: Optional[ClientResponse] = None + self._session = session + + async def __aenter__(self) -> ClientResponse: + try: + self._resp = await self._coro + except BaseException: + await self._session.close() + raise + else: + return self._resp + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + assert self._resp is not None + self._resp.close() + await self._session.close() + + +def request( + method: str, + url: StrOrURL, + *, + params: Optional[Mapping[str, str]] = None, + data: Any = None, + json: Any = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + allow_redirects: bool = True, + max_redirects: int = 10, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + raise_for_status: Optional[bool] = None, + read_until_eof: bool = True, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + timeout: Union[ClientTimeout, object] = sentinel, + cookies: Optional[LooseCookies] = None, + version: HttpVersion = http.HttpVersion11, + connector: Optional[BaseConnector] = None, + read_bufsize: Optional[int] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> _SessionRequestContextManager: + """Constructs and sends a request. + + Returns response object. + method - HTTP method + url - request url + params - (optional) Dictionary or bytes to be sent in the query + string of the new request + data - (optional) Dictionary, bytes, or file-like object to + send in the body of the request + json - (optional) Any json compatible python object + headers - (optional) Dictionary of HTTP Headers to send with + the request + cookies - (optional) Dict object to send with the request + auth - (optional) BasicAuth named tuple represent HTTP Basic Auth + auth - aiohttp.helpers.BasicAuth + allow_redirects - (optional) If set to False, do not follow + redirects + version - Request HTTP version. + compress - Set to True if request has to be compressed + with deflate encoding. + chunked - Set to chunk size for chunked transfer encoding. + expect100 - Expect 100-continue response from server. + connector - BaseConnector sub-class instance to support + connection pooling. + read_until_eof - Read response until eof if response + does not have Content-Length header. + loop - Optional event loop. + timeout - Optional ClientTimeout settings structure, 5min + total timeout by default. 
+    Usage::
+      >>> import aiohttp
+      >>> resp = await aiohttp.request('GET', 'http://python.org/')
+      >>> resp
+      <ClientResponse(python.org/) [200]>
+      >>> data = await resp.read()
+    """
+    connector_owner = False
+    if connector is None:
+        connector_owner = True
+        connector = TCPConnector(loop=loop, force_close=True)
+
+    session = ClientSession(
+        loop=loop,
+        cookies=cookies,
+        version=version,
+        timeout=timeout,
+        connector=connector,
+        connector_owner=connector_owner,
+    )
+
+    return _SessionRequestContextManager(
+        session._request(
+            method,
+            url,
+            params=params,
+            data=data,
+            json=json,
+            headers=headers,
+            skip_auto_headers=skip_auto_headers,
+            auth=auth,
+            allow_redirects=allow_redirects,
+            max_redirects=max_redirects,
+            compress=compress,
+            chunked=chunked,
+            expect100=expect100,
+            raise_for_status=raise_for_status,
+            read_until_eof=read_until_eof,
+            proxy=proxy,
+            proxy_auth=proxy_auth,
+            read_bufsize=read_bufsize,
+        ),
+        session,
+    )
diff --git a/venv/lib/python3.10/site-packages/aiohttp/client_exceptions.py b/venv/lib/python3.10/site-packages/aiohttp/client_exceptions.py
new file mode 100644
index 0000000..c640e1e
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/aiohttp/client_exceptions.py
@@ -0,0 +1,342 @@
+"""HTTP related errors."""
+
+import asyncio
+import warnings
+from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
+
+from .http_parser import RawResponseMessage
+from .typedefs import LooseHeaders
+
+try:
+    import ssl
+
+    SSLContext = ssl.SSLContext
+except ImportError:  # pragma: no cover
+    ssl = SSLContext = None  # type: ignore[assignment]
+
+
+if TYPE_CHECKING:  # pragma: no cover
+    from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
+else:
+    RequestInfo = ClientResponse = ConnectionKey = None
+
+__all__ = (
+    "ClientError",
+    "ClientConnectionError",
+    "ClientOSError",
+    "ClientConnectorError",
+    "ClientProxyConnectionError",
+    "ClientSSLError",
+    "ClientConnectorSSLError",
+    "ClientConnectorCertificateError",
+    "ServerConnectionError",
+    "ServerTimeoutError",
+    "ServerDisconnectedError",
+    "ServerFingerprintMismatch",
+    "ClientResponseError",
+    "ClientHttpProxyError",
+    "WSServerHandshakeError",
+    "ContentTypeError",
+    "ClientPayloadError",
+    "InvalidURL",
+)
+
+
+class ClientError(Exception):
+    """Base class for client connection errors."""
+
+
+class ClientResponseError(ClientError):
+    """Connection error during reading response.
+ + request_info: instance of RequestInfo + """ + + def __init__( + self, + request_info: RequestInfo, + history: Tuple[ClientResponse, ...], + *, + code: Optional[int] = None, + status: Optional[int] = None, + message: str = "", + headers: Optional[LooseHeaders] = None, + ) -> None: + self.request_info = request_info + if code is not None: + if status is not None: + raise ValueError( + "Both code and status arguments are provided; " + "code is deprecated, use status instead" + ) + warnings.warn( + "code argument is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + if status is not None: + self.status = status + elif code is not None: + self.status = code + else: + self.status = 0 + self.message = message + self.headers = headers + self.history = history + self.args = (request_info, history) + + def __str__(self) -> str: + return "{}, message={!r}, url={!r}".format( + self.status, + self.message, + self.request_info.real_url, + ) + + def __repr__(self) -> str: + args = f"{self.request_info!r}, {self.history!r}" + if self.status != 0: + args += f", status={self.status!r}" + if self.message != "": + args += f", message={self.message!r}" + if self.headers is not None: + args += f", headers={self.headers!r}" + return f"{type(self).__name__}({args})" + + @property + def code(self) -> int: + warnings.warn( + "code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + return self.status + + @code.setter + def code(self, value: int) -> None: + warnings.warn( + "code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + self.status = value + + +class ContentTypeError(ClientResponseError): + """ContentType found is not valid.""" + + +class WSServerHandshakeError(ClientResponseError): + """websocket server handshake error.""" + + +class ClientHttpProxyError(ClientResponseError): + """HTTP proxy error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + proxy responds with status other than ``200 OK`` + on ``CONNECT`` request. + """ + + +class TooManyRedirects(ClientResponseError): + """Client was redirected too many times.""" + + +class ClientConnectionError(ClientError): + """Base class for client socket errors.""" + + +class ClientOSError(ClientConnectionError, OSError): + """OSError error.""" + + +class ClientConnectorError(ClientOSError): + """Client connector error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + a connection can not be established. + """ + + def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None: + self._conn_key = connection_key + self._os_error = os_error + super().__init__(os_error.errno, os_error.strerror) + self.args = (connection_key, os_error) + + @property + def os_error(self) -> OSError: + return self._os_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]: + return self._conn_key.ssl + + def __str__(self) -> str: + return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format( + self, self.ssl if self.ssl is not None else "default", self.strerror + ) + + # OSError.__reduce__ does too much black magick + __reduce__ = BaseException.__reduce__ + + +class ClientProxyConnectionError(ClientConnectorError): + """Proxy connection error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + connection to proxy can not be established. 
+ """ + + +class UnixClientConnectorError(ClientConnectorError): + """Unix connector error. + + Raised in :py:class:`aiohttp.connector.UnixConnector` + if connection to unix socket can not be established. + """ + + def __init__( + self, path: str, connection_key: ConnectionKey, os_error: OSError + ) -> None: + self._path = path + super().__init__(connection_key, os_error) + + @property + def path(self) -> str: + return self._path + + def __str__(self) -> str: + return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format( + self, self.ssl if self.ssl is not None else "default", self.strerror + ) + + +class ServerConnectionError(ClientConnectionError): + """Server connection errors.""" + + +class ServerDisconnectedError(ServerConnectionError): + """Server disconnected.""" + + def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None: + if message is None: + message = "Server disconnected" + + self.args = (message,) + self.message = message + + +class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): + """Server timeout error.""" + + +class ServerFingerprintMismatch(ServerConnectionError): + """SSL certificate does not match expected fingerprint.""" + + def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None: + self.expected = expected + self.got = got + self.host = host + self.port = port + self.args = (expected, got, host, port) + + def __repr__(self) -> str: + return "<{} expected={!r} got={!r} host={!r} port={!r}>".format( + self.__class__.__name__, self.expected, self.got, self.host, self.port + ) + + +class ClientPayloadError(ClientError): + """Response payload error.""" + + +class InvalidURL(ClientError, ValueError): + """Invalid URL. + + URL used for fetching is malformed, e.g. it doesn't contains host + part. 
+ """ + + # Derive from ValueError for backward compatibility + + def __init__(self, url: Any) -> None: + # The type of url is not yarl.URL because the exception can be raised + # on URL(url) call + super().__init__(url) + + @property + def url(self) -> Any: + return self.args[0] + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.url}>" + + +class ClientSSLError(ClientConnectorError): + """Base error for ssl.*Errors.""" + + +if ssl is not None: + cert_errors = (ssl.CertificateError,) + cert_errors_bases = ( + ClientSSLError, + ssl.CertificateError, + ) + + ssl_errors = (ssl.SSLError,) + ssl_error_bases = (ClientSSLError, ssl.SSLError) +else: # pragma: no cover + cert_errors = tuple() + cert_errors_bases = ( + ClientSSLError, + ValueError, + ) + + ssl_errors = tuple() + ssl_error_bases = (ClientSSLError,) + + +class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc] + """Response ssl error.""" + + +class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc] + """Response certificate error.""" + + def __init__( + self, connection_key: ConnectionKey, certificate_error: Exception + ) -> None: + self._conn_key = connection_key + self._certificate_error = certificate_error + self.args = (connection_key, certificate_error) + + @property + def certificate_error(self) -> Exception: + return self._certificate_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> bool: + return self._conn_key.is_ssl + + def __str__(self) -> str: + return ( + "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} " + "[{0.certificate_error.__class__.__name__}: " + "{0.certificate_error.args}]".format(self) + ) diff --git a/venv/lib/python3.10/site-packages/aiohttp/client_proto.py b/venv/lib/python3.10/site-packages/aiohttp/client_proto.py new file mode 100644 index 0000000..3041157 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/client_proto.py @@ -0,0 +1,251 @@ +import asyncio +from contextlib import suppress +from typing import Any, Optional, Tuple + +from .base_protocol import BaseProtocol +from .client_exceptions import ( + ClientOSError, + ClientPayloadError, + ServerDisconnectedError, + ServerTimeoutError, +) +from .helpers import BaseTimerContext +from .http import HttpResponseParser, RawResponseMessage +from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader + + +class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]): + """Helper class to adapt between Protocol and StreamReader.""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + BaseProtocol.__init__(self, loop=loop) + DataQueue.__init__(self, loop) + + self._should_close = False + + self._payload: Optional[StreamReader] = None + self._skip_payload = False + self._payload_parser = None + + self._timer = None + + self._tail = b"" + self._upgraded = False + self._parser: Optional[HttpResponseParser] = None + + self._read_timeout: Optional[float] = None + self._read_timeout_handle: Optional[asyncio.TimerHandle] = None + + @property + def upgraded(self) -> bool: + return self._upgraded + + @property + def should_close(self) -> bool: + if self._payload is not None and not self._payload.is_eof() or self._upgraded: + return True + + return ( + self._should_close + or self._upgraded + or self.exception() is not None + or self._payload_parser is not None + or len(self) > 0 + or bool(self._tail) + ) + + def 
force_close(self) -> None: + self._should_close = True + + def close(self) -> None: + transport = self.transport + if transport is not None: + transport.close() + self.transport = None + self._payload = None + self._drop_timeout() + + def is_connected(self) -> bool: + return self.transport is not None and not self.transport.is_closing() + + def connection_lost(self, exc: Optional[BaseException]) -> None: + self._drop_timeout() + + if self._payload_parser is not None: + with suppress(Exception): + self._payload_parser.feed_eof() + + uncompleted = None + if self._parser is not None: + try: + uncompleted = self._parser.feed_eof() + except Exception: + if self._payload is not None: + self._payload.set_exception( + ClientPayloadError("Response payload is not completed") + ) + + if not self.is_eof(): + if isinstance(exc, OSError): + exc = ClientOSError(*exc.args) + if exc is None: + exc = ServerDisconnectedError(uncompleted) + # assigns self._should_close to True as side effect, + # we do it anyway below + self.set_exception(exc) + + self._should_close = True + self._parser = None + self._payload = None + self._payload_parser = None + self._reading_paused = False + + super().connection_lost(exc) + + def eof_received(self) -> None: + # should call parser.feed_eof() most likely + self._drop_timeout() + + def pause_reading(self) -> None: + super().pause_reading() + self._drop_timeout() + + def resume_reading(self) -> None: + super().resume_reading() + self._reschedule_timeout() + + def set_exception(self, exc: BaseException) -> None: + self._should_close = True + self._drop_timeout() + super().set_exception(exc) + + def set_parser(self, parser: Any, payload: Any) -> None: + # TODO: actual types are: + # parser: WebSocketReader + # payload: FlowControlDataQueue + # but they are not generi enough + # Need an ABC for both types + self._payload = payload + self._payload_parser = parser + + self._drop_timeout() + + if self._tail: + data, self._tail = self._tail, b"" + self.data_received(data) + + def set_response_params( + self, + *, + timer: Optional[BaseTimerContext] = None, + skip_payload: bool = False, + read_until_eof: bool = False, + auto_decompress: bool = True, + read_timeout: Optional[float] = None, + read_bufsize: int = 2**16, + ) -> None: + self._skip_payload = skip_payload + + self._read_timeout = read_timeout + self._reschedule_timeout() + + self._parser = HttpResponseParser( + self, + self._loop, + read_bufsize, + timer=timer, + payload_exception=ClientPayloadError, + response_with_body=not skip_payload, + read_until_eof=read_until_eof, + auto_decompress=auto_decompress, + ) + + if self._tail: + data, self._tail = self._tail, b"" + self.data_received(data) + + def _drop_timeout(self) -> None: + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + self._read_timeout_handle = None + + def _reschedule_timeout(self) -> None: + timeout = self._read_timeout + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + + if timeout: + self._read_timeout_handle = self._loop.call_later( + timeout, self._on_read_timeout + ) + else: + self._read_timeout_handle = None + + def _on_read_timeout(self) -> None: + exc = ServerTimeoutError("Timeout on reading data from socket") + self.set_exception(exc) + if self._payload is not None: + self._payload.set_exception(exc) + + def data_received(self, data: bytes) -> None: + self._reschedule_timeout() + + if not data: + return + + # custom payload parser + if self._payload_parser is not None: + eof, tail = 
self._payload_parser.feed_data(data) + if eof: + self._payload = None + self._payload_parser = None + + if tail: + self.data_received(tail) + return + else: + if self._upgraded or self._parser is None: + # i.e. websocket connection, websocket parser is not set yet + self._tail += data + else: + # parse http messages + try: + messages, upgraded, tail = self._parser.feed_data(data) + except BaseException as exc: + if self.transport is not None: + # connection.release() could be called BEFORE + # data_received(), the transport is already + # closed in this case + self.transport.close() + # should_close is True after the call + self.set_exception(exc) + return + + self._upgraded = upgraded + + payload: Optional[StreamReader] = None + for message, payload in messages: + if message.should_close: + self._should_close = True + + self._payload = payload + + if self._skip_payload or message.code in (204, 304): + self.feed_data((message, EMPTY_PAYLOAD), 0) + else: + self.feed_data((message, payload), 0) + if payload is not None: + # new message(s) was processed + # register timeout handler unsubscribing + # either on end-of-stream or immediately for + # EMPTY_PAYLOAD + if payload is not EMPTY_PAYLOAD: + payload.on_eof(self._drop_timeout) + else: + self._drop_timeout() + + if tail: + if upgraded: + self.data_received(tail) + else: + self._tail = tail diff --git a/venv/lib/python3.10/site-packages/aiohttp/client_reqrep.py b/venv/lib/python3.10/site-packages/aiohttp/client_reqrep.py new file mode 100644 index 0000000..28b8a28 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/client_reqrep.py @@ -0,0 +1,1134 @@ +import asyncio +import codecs +import functools +import io +import re +import sys +import traceback +import warnings +from hashlib import md5, sha1, sha256 +from http.cookies import CookieError, Morsel, SimpleCookie +from types import MappingProxyType, TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterable, + List, + Mapping, + Optional, + Tuple, + Type, + Union, + cast, +) + +import attr +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy +from yarl import URL + +from . 
import hdrs, helpers, http, multipart, payload +from .abc import AbstractStreamWriter +from .client_exceptions import ( + ClientConnectionError, + ClientOSError, + ClientResponseError, + ContentTypeError, + InvalidURL, + ServerFingerprintMismatch, +) +from .formdata import FormData +from .helpers import ( + PY_36, + BaseTimerContext, + BasicAuth, + HeadersMixin, + TimerNoop, + noop, + reify, + set_result, +) +from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, StreamWriter +from .log import client_logger +from .streams import StreamReader +from .typedefs import ( + DEFAULT_JSON_DECODER, + JSONDecoder, + LooseCookies, + LooseHeaders, + RawHeaders, +) + +try: + import ssl + from ssl import SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + +try: + import cchardet as chardet +except ImportError: # pragma: no cover + import charset_normalizer as chardet # type: ignore[no-redef] + + +__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") + + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientSession + from .connector import Connection + from .tracing import Trace + + +json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ContentDisposition: + type: Optional[str] + parameters: "MappingProxyType[str, str]" + filename: Optional[str] + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class RequestInfo: + url: URL + method: str + headers: "CIMultiDictProxy[str]" + real_url: URL = attr.ib() + + @real_url.default + def real_url_default(self) -> URL: + return self.url + + +class Fingerprint: + HASHFUNC_BY_DIGESTLEN = { + 16: md5, + 20: sha1, + 32: sha256, + } + + def __init__(self, fingerprint: bytes) -> None: + digestlen = len(fingerprint) + hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen) + if not hashfunc: + raise ValueError("fingerprint has invalid length") + elif hashfunc is md5 or hashfunc is sha1: + raise ValueError( + "md5 and sha1 are insecure and " "not supported. Use sha256." 
+ ) + self._hashfunc = hashfunc + self._fingerprint = fingerprint + + @property + def fingerprint(self) -> bytes: + return self._fingerprint + + def check(self, transport: asyncio.Transport) -> None: + if not transport.get_extra_info("sslcontext"): + return + sslobj = transport.get_extra_info("ssl_object") + cert = sslobj.getpeercert(binary_form=True) + got = self._hashfunc(cert).digest() + if got != self._fingerprint: + host, port, *_ = transport.get_extra_info("peername") + raise ServerFingerprintMismatch(self._fingerprint, got, host, port) + + +if ssl is not None: + SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None)) +else: # pragma: no cover + SSL_ALLOWED_TYPES = type(None) + + +def _merge_ssl_params( + ssl: Union["SSLContext", bool, Fingerprint, None], + verify_ssl: Optional[bool], + ssl_context: Optional["SSLContext"], + fingerprint: Optional[bytes], +) -> Union["SSLContext", bool, Fingerprint, None]: + if verify_ssl is not None and not verify_ssl: + warnings.warn( + "verify_ssl is deprecated, use ssl=False instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not None: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = False + if ssl_context is not None: + warnings.warn( + "ssl_context is deprecated, use ssl=context instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not None: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = ssl_context + if fingerprint is not None: + warnings.warn( + "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not None: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = Fingerprint(fingerprint) + if not isinstance(ssl, SSL_ALLOWED_TYPES): + raise TypeError( + "ssl should be SSLContext, bool, Fingerprint or None, " + "got {!r} instead.".format(ssl) + ) + return ssl + + +@attr.s(auto_attribs=True, slots=True, frozen=True) +class ConnectionKey: + # the key should contain an information about used proxy / TLS + # to prevent reusing wrong connections from a pool + host: str + port: Optional[int] + is_ssl: bool + ssl: Union[SSLContext, None, bool, Fingerprint] + proxy: Optional[URL] + proxy_auth: Optional[BasicAuth] + proxy_headers_hash: Optional[int] # hash(CIMultiDict) + + +def _is_expected_content_type( + response_content_type: str, expected_content_type: str +) -> bool: + if expected_content_type == "application/json": + return json_re.match(response_content_type) is not None + return expected_content_type in response_content_type + + +class ClientRequest: + GET_METHODS = { + hdrs.METH_GET, + hdrs.METH_HEAD, + hdrs.METH_OPTIONS, + hdrs.METH_TRACE, + } + POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} + ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) + + DEFAULT_HEADERS = { + hdrs.ACCEPT: "*/*", + hdrs.ACCEPT_ENCODING: "gzip, deflate", + } + + body = b"" + auth = None + response = None + + _writer = None # async task for streaming data + _continue = None # waiter future for '100 Continue' response + + # N.B. + # Adding __del__ method with self._writer closing doesn't make sense + # because _writer is instance method, thus it keeps a reference to self. + # Until writer has finished finalizer will not be called. 
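+    # (Editor's note: ClientRequest is normally built for you by
+    # ClientSession._request() rather than instantiated directly. A hedged
+    # sketch of direct construction, mainly useful in tests; the URL is
+    # illustrative:
+    #
+    #     from yarl import URL
+    #     req = ClientRequest("get", URL("http://example.org/"), loop=loop)
+    #     assert req.method == "GET" and not req.is_ssl()
+    #
+    # Note how __init__ below upper-cases the method and fills in
+    # DEFAULT_HEADERS for anything not listed in skip_auto_headers.)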
+ + def __init__( + self, + method: str, + url: URL, + *, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Iterable[str] = frozenset(), + data: Any = None, + cookies: Optional[LooseCookies] = None, + auth: Optional[BasicAuth] = None, + version: http.HttpVersion = http.HttpVersion11, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + response_class: Optional[Type["ClientResponse"]] = None, + proxy: Optional[URL] = None, + proxy_auth: Optional[BasicAuth] = None, + timer: Optional[BaseTimerContext] = None, + session: Optional["ClientSession"] = None, + ssl: Union[SSLContext, bool, Fingerprint, None] = None, + proxy_headers: Optional[LooseHeaders] = None, + traces: Optional[List["Trace"]] = None, + ): + + if loop is None: + loop = asyncio.get_event_loop() + + assert isinstance(url, URL), url + assert isinstance(proxy, (URL, type(None))), proxy + # FIXME: session is None in tests only, need to fix tests + # assert session is not None + self._session = cast("ClientSession", session) + if params: + q = MultiDict(url.query) + url2 = url.with_query(params) + q.extend(url2.query) + url = url.with_query(q) + self.original_url = url + self.url = url.with_fragment(None) + self.method = method.upper() + self.chunked = chunked + self.compress = compress + self.loop = loop + self.length = None + if response_class is None: + real_response_class = ClientResponse + else: + real_response_class = response_class + self.response_class: Type[ClientResponse] = real_response_class + self._timer = timer if timer is not None else TimerNoop() + self._ssl = ssl + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self.update_version(version) + self.update_host(url) + self.update_headers(headers) + self.update_auto_headers(skip_auto_headers) + self.update_cookies(cookies) + self.update_content_encoding(data) + self.update_auth(auth) + self.update_proxy(proxy, proxy_auth, proxy_headers) + + self.update_body_from_data(data) + if data is not None or self.method not in self.GET_METHODS: + self.update_transfer_encoding() + self.update_expect_continue(expect100) + if traces is None: + traces = [] + self._traces = traces + + def is_ssl(self) -> bool: + return self.url.scheme in ("https", "wss") + + @property + def ssl(self) -> Union["SSLContext", None, bool, Fingerprint]: + return self._ssl + + @property + def connection_key(self) -> ConnectionKey: + proxy_headers = self.proxy_headers + if proxy_headers: + h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items())) + else: + h = None + return ConnectionKey( + self.host, + self.port, + self.is_ssl(), + self.ssl, + self.proxy, + self.proxy_auth, + h, + ) + + @property + def host(self) -> str: + ret = self.url.raw_host + assert ret is not None + return ret + + @property + def port(self) -> Optional[int]: + return self.url.port + + @property + def request_info(self) -> RequestInfo: + headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) + return RequestInfo(self.url, self.method, headers, self.original_url) + + def update_host(self, url: URL) -> None: + """Update destination host, port and connection type (ssl).""" + # get host/port + if not url.raw_host: + raise InvalidURL(url) + + # basic auth info + username, password = url.user, url.password + if username: + self.auth = helpers.BasicAuth(username, password or "") + + def update_version(self, version: 
Union[http.HttpVersion, str]) -> None: + """Convert request version to two elements tuple. + + parser HTTP version '1.1' => (1, 1) + """ + if isinstance(version, str): + v = [part.strip() for part in version.split(".", 1)] + try: + version = http.HttpVersion(int(v[0]), int(v[1])) + except ValueError: + raise ValueError( + f"Can not parse http version number: {version}" + ) from None + self.version = version + + def update_headers(self, headers: Optional[LooseHeaders]) -> None: + """Update request headers.""" + self.headers: CIMultiDict[str] = CIMultiDict() + + # add host + netloc = cast(str, self.url.raw_host) + if helpers.is_ipv6_address(netloc): + netloc = f"[{netloc}]" + if self.url.port is not None and not self.url.is_default_port(): + netloc += ":" + str(self.url.port) + self.headers[hdrs.HOST] = netloc + + if headers: + if isinstance(headers, (dict, MultiDictProxy, MultiDict)): + headers = headers.items() # type: ignore[assignment] + + for key, value in headers: # type: ignore[misc] + # A special case for Host header + if key.lower() == "host": + self.headers[key] = value + else: + self.headers.add(key, value) + + def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: + self.skip_auto_headers = CIMultiDict( + (hdr, None) for hdr in sorted(skip_auto_headers) + ) + used_headers = self.headers.copy() + used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + + for hdr, val in self.DEFAULT_HEADERS.items(): + if hdr not in used_headers: + self.headers.add(hdr, val) + + if hdrs.USER_AGENT not in used_headers: + self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE + + def update_cookies(self, cookies: Optional[LooseCookies]) -> None: + """Update request cookies header.""" + if not cookies: + return + + c: SimpleCookie[str] = SimpleCookie() + if hdrs.COOKIE in self.headers: + c.load(self.headers.get(hdrs.COOKIE, "")) + del self.headers[hdrs.COOKIE] + + if isinstance(cookies, Mapping): + iter_cookies = cookies.items() + else: + iter_cookies = cookies # type: ignore[assignment] + for name, value in iter_cookies: + if isinstance(value, Morsel): + # Preserve coded_value + mrsl_val = value.get(value.key, Morsel()) + mrsl_val.set(value.key, value.value, value.coded_value) + c[name] = mrsl_val + else: + c[name] = value # type: ignore[assignment] + + self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() + + def update_content_encoding(self, data: Any) -> None: + """Set request content encoding.""" + if data is None: + return + + enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower() + if enc: + if self.compress: + raise ValueError( + "compress can not be set " "if Content-Encoding header is set" + ) + elif self.compress: + if not isinstance(self.compress, str): + self.compress = "deflate" + self.headers[hdrs.CONTENT_ENCODING] = self.compress + self.chunked = True # enable chunked, no need to deal with length + + def update_transfer_encoding(self) -> None: + """Analyze transfer-encoding header.""" + te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower() + + if "chunked" in te: + if self.chunked: + raise ValueError( + "chunked can not be set " + 'if "Transfer-Encoding: chunked" header is set' + ) + + elif self.chunked: + if hdrs.CONTENT_LENGTH in self.headers: + raise ValueError( + "chunked can not be set " "if Content-Length header is set" + ) + + self.headers[hdrs.TRANSFER_ENCODING] = "chunked" + else: + if hdrs.CONTENT_LENGTH not in self.headers: + self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) + + def update_auth(self, auth: Optional[BasicAuth]) 
-> None: + """Set basic auth.""" + if auth is None: + auth = self.auth + if auth is None: + return + + if not isinstance(auth, helpers.BasicAuth): + raise TypeError("BasicAuth() tuple is required instead") + + self.headers[hdrs.AUTHORIZATION] = auth.encode() + + def update_body_from_data(self, body: Any) -> None: + if body is None: + return + + # FormData + if isinstance(body, FormData): + body = body() + + try: + body = payload.PAYLOAD_REGISTRY.get(body, disposition=None) + except payload.LookupError: + body = FormData(body)() + + self.body = body + + # enable chunked encoding if needed + if not self.chunked: + if hdrs.CONTENT_LENGTH not in self.headers: + size = body.size + if size is None: + self.chunked = True + else: + if hdrs.CONTENT_LENGTH not in self.headers: + self.headers[hdrs.CONTENT_LENGTH] = str(size) + + # copy payload headers + assert body.headers + for (key, value) in body.headers.items(): + if key in self.headers: + continue + if key in self.skip_auto_headers: + continue + self.headers[key] = value + + def update_expect_continue(self, expect: bool = False) -> None: + if expect: + self.headers[hdrs.EXPECT] = "100-continue" + elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue": + expect = True + + if expect: + self._continue = self.loop.create_future() + + def update_proxy( + self, + proxy: Optional[URL], + proxy_auth: Optional[BasicAuth], + proxy_headers: Optional[LooseHeaders], + ) -> None: + if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): + raise ValueError("proxy_auth must be None or BasicAuth() tuple") + self.proxy = proxy + self.proxy_auth = proxy_auth + self.proxy_headers = proxy_headers + + def keep_alive(self) -> bool: + if self.version < HttpVersion10: + # keep alive not supported at all + return False + if self.version == HttpVersion10: + if self.headers.get(hdrs.CONNECTION) == "keep-alive": + return True + else: # no headers means we close for Http 1.0 + return False + elif self.headers.get(hdrs.CONNECTION) == "close": + return False + + return True + + async def write_bytes( + self, writer: AbstractStreamWriter, conn: "Connection" + ) -> None: + """Support coroutines that yields bytes objects.""" + # 100 response + if self._continue is not None: + await writer.drain() + await self._continue + + protocol = conn.protocol + assert protocol is not None + try: + if isinstance(self.body, payload.Payload): + await self.body.write(writer) + else: + if isinstance(self.body, (bytes, bytearray)): + self.body = (self.body,) # type: ignore[assignment] + + for chunk in self.body: + await writer.write(chunk) # type: ignore[arg-type] + + await writer.write_eof() + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + protocol.set_exception(exc) + else: + new_exc = ClientOSError( + exc.errno, "Can not write request body for %s" % self.url + ) + new_exc.__context__ = exc + new_exc.__cause__ = exc + protocol.set_exception(new_exc) + except asyncio.CancelledError as exc: + if not conn.closed: + protocol.set_exception(exc) + except Exception as exc: + protocol.set_exception(exc) + finally: + self._writer = None + + async def send(self, conn: "Connection") -> "ClientResponse": + # Specify request target: + # - CONNECT request must send authority form URI + # - not CONNECT proxy must send absolute form URI + # - most common is origin form URI + if self.method == hdrs.METH_CONNECT: + connect_host = self.url.raw_host + assert connect_host is not None + if helpers.is_ipv6_address(connect_host): + connect_host = 
f"[{connect_host}]" + path = f"{connect_host}:{self.url.port}" + elif self.proxy and not self.is_ssl(): + path = str(self.url) + else: + path = self.url.raw_path + if self.url.raw_query_string: + path += "?" + self.url.raw_query_string + + protocol = conn.protocol + assert protocol is not None + writer = StreamWriter( + protocol, + self.loop, + on_chunk_sent=functools.partial( + self._on_chunk_request_sent, self.method, self.url + ), + on_headers_sent=functools.partial( + self._on_headers_request_sent, self.method, self.url + ), + ) + + if self.compress: + writer.enable_compression(self.compress) + + if self.chunked is not None: + writer.enable_chunking() + + # set default content-type + if ( + self.method in self.POST_METHODS + and hdrs.CONTENT_TYPE not in self.skip_auto_headers + and hdrs.CONTENT_TYPE not in self.headers + ): + self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" + + # set the connection header + connection = self.headers.get(hdrs.CONNECTION) + if not connection: + if self.keep_alive(): + if self.version == HttpVersion10: + connection = "keep-alive" + else: + if self.version == HttpVersion11: + connection = "close" + + if connection is not None: + self.headers[hdrs.CONNECTION] = connection + + # status + headers + status_line = "{0} {1} HTTP/{2[0]}.{2[1]}".format( + self.method, path, self.version + ) + await writer.write_headers(status_line, self.headers) + + self._writer = self.loop.create_task(self.write_bytes(writer, conn)) + + response_class = self.response_class + assert response_class is not None + self.response = response_class( + self.method, + self.original_url, + writer=self._writer, + continue100=self._continue, + timer=self._timer, + request_info=self.request_info, + traces=self._traces, + loop=self.loop, + session=self._session, + ) + return self.response + + async def close(self) -> None: + if self._writer is not None: + try: + await self._writer + finally: + self._writer = None + + def terminate(self) -> None: + if self._writer is not None: + if not self.loop.is_closed(): + self._writer.cancel() + self._writer = None + + async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: + for trace in self._traces: + await trace.send_request_chunk_sent(method, url, chunk) + + async def _on_headers_request_sent( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + for trace in self._traces: + await trace.send_request_headers(method, url, headers) + + +class ClientResponse(HeadersMixin): + + # from the Status-Line of the response + version = None # HTTP-Version + status: int = None # type: ignore[assignment] # Status-Code + reason = None # Reason-Phrase + + content: StreamReader = None # type: ignore[assignment] # Payload stream + _headers: "CIMultiDictProxy[str]" = None # type: ignore[assignment] + _raw_headers: RawHeaders = None # type: ignore[assignment] # Response raw headers + + _connection = None # current connection + _source_traceback = None + # setted up by ClientRequest after ClientResponse object creation + # post-init stage allows to not change ctor signature + _closed = True # to allow __del__ for non-initialized properly response + _released = False + + def __init__( + self, + method: str, + url: URL, + *, + writer: "asyncio.Task[None]", + continue100: Optional["asyncio.Future[bool]"], + timer: BaseTimerContext, + request_info: RequestInfo, + traces: List["Trace"], + loop: asyncio.AbstractEventLoop, + session: "ClientSession", + ) -> None: + assert isinstance(url, URL) + + self.method = method + 
+        self.cookies: SimpleCookie[str] = SimpleCookie()
+
+        self._real_url = url
+        self._url = url.with_fragment(None)
+        self._body: Any = None
+        self._writer: Optional[asyncio.Task[None]] = writer
+        self._continue = continue100  # None by default
+        self._closed = True
+        self._history: Tuple[ClientResponse, ...] = ()
+        self._request_info = request_info
+        self._timer = timer if timer is not None else TimerNoop()
+        self._cache: Dict[str, Any] = {}
+        self._traces = traces
+        self._loop = loop
+        # store a reference to session #1985
+        self._session: Optional[ClientSession] = session
+        if loop.get_debug():
+            self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+    @reify
+    def url(self) -> URL:
+        return self._url
+
+    @reify
+    def url_obj(self) -> URL:
+        warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
+        return self._url
+
+    @reify
+    def real_url(self) -> URL:
+        return self._real_url
+
+    @reify
+    def host(self) -> str:
+        assert self._url.host is not None
+        return self._url.host
+
+    @reify
+    def headers(self) -> "CIMultiDictProxy[str]":
+        return self._headers
+
+    @reify
+    def raw_headers(self) -> RawHeaders:
+        return self._raw_headers
+
+    @reify
+    def request_info(self) -> RequestInfo:
+        return self._request_info
+
+    @reify
+    def content_disposition(self) -> Optional[ContentDisposition]:
+        raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
+        if raw is None:
+            return None
+        disposition_type, params_dct = multipart.parse_content_disposition(raw)
+        params = MappingProxyType(params_dct)
+        filename = multipart.content_disposition_filename(params)
+        return ContentDisposition(disposition_type, params, filename)
+
+    def __del__(self, _warnings: Any = warnings) -> None:
+        if self._closed:
+            return
+
+        if self._connection is not None:
+            self._connection.release()
+            self._cleanup_writer()
+
+            if self._loop.get_debug():
+                if PY_36:
+                    kwargs = {"source": self}
+                else:
+                    kwargs = {}
+                _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
+                context = {"client_response": self, "message": "Unclosed response"}
+                if self._source_traceback:
+                    context["source_traceback"] = self._source_traceback
+                self._loop.call_exception_handler(context)
+
+    def __repr__(self) -> str:
+        out = io.StringIO()
+        ascii_encodable_url = str(self.url)
+        if self.reason:
+            ascii_encodable_reason = self.reason.encode(
+                "ascii", "backslashreplace"
+            ).decode("ascii")
+        else:
+            ascii_encodable_reason = self.reason
+        print(
+            "<ClientResponse({}) [{} {}]>".format(
+                ascii_encodable_url, self.status, ascii_encodable_reason
+            ),
+            file=out,
+        )
+        print(self.headers, file=out)
+        return out.getvalue()
+
+    @property
+    def connection(self) -> Optional["Connection"]:
+        return self._connection
+
+    @reify
+    def history(self) -> Tuple["ClientResponse", ...]:
+        """A sequence of responses, if redirects occurred."""
+        return self._history
+
+    @reify
+    def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
+        links_str = ", ".join(self.headers.getall("link", []))
+
+        if not links_str:
+            return MultiDictProxy(MultiDict())
+
+        links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
+
+        for val in re.split(r",(?=\s*<)", links_str):
+            match = re.match(r"\s*<(.*)>(.*)", val)
+            if match is None:  # pragma: no cover
+                # the check exists to suppress mypy error
+                continue
+            url, params_str = match.groups()
+            params = params_str.split(";")[1:]
+
+            link: MultiDict[Union[str, URL]] = MultiDict()
+
+            for param in params:
+                match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
+                if match is None:  # pragma:
no cover + # the check exists to suppress mypy error + continue + key, _, value, _ = match.groups() + + link.add(key, value) + + key = link.get("rel", url) # type: ignore[assignment] + + link.add("url", self.url.join(URL(url))) + + links.add(key, MultiDictProxy(link)) + + return MultiDictProxy(links) + + async def start(self, connection: "Connection") -> "ClientResponse": + """Start response processing.""" + self._closed = False + self._protocol = connection.protocol + self._connection = connection + + with self._timer: + while True: + # read response + try: + protocol = self._protocol + message, payload = await protocol.read() # type: ignore[union-attr] + except http.HttpProcessingError as exc: + raise ClientResponseError( + self.request_info, + self.history, + status=exc.code, + message=exc.message, + headers=exc.headers, + ) from exc + + if message.code < 100 or message.code > 199 or message.code == 101: + break + + if self._continue is not None: + set_result(self._continue, True) + self._continue = None + + # payload eof handler + payload.on_eof(self._response_eof) + + # response status + self.version = message.version + self.status = message.code + self.reason = message.reason + + # headers + self._headers = message.headers # type is CIMultiDictProxy + self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] + + # payload + self.content = payload + + # cookies + for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): + try: + self.cookies.load(hdr) + except CookieError as exc: + client_logger.warning("Can not load response cookies: %s", exc) + return self + + def _response_eof(self) -> None: + if self._closed: + return + + if self._connection is not None: + # websocket, protocol could be None because + # connection could be detached + if ( + self._connection.protocol is not None + and self._connection.protocol.upgraded + ): + return + + self._connection.release() + self._connection = None + + self._closed = True + self._cleanup_writer() + + @property + def closed(self) -> bool: + return self._closed + + def close(self) -> None: + if not self._released: + self._notify_content() + if self._closed: + return + + self._closed = True + if self._loop is None or self._loop.is_closed(): + return + + if self._connection is not None: + self._connection.close() + self._connection = None + self._cleanup_writer() + + def release(self) -> Any: + if not self._released: + self._notify_content() + if self._closed: + return noop() + + self._closed = True + if self._connection is not None: + self._connection.release() + self._connection = None + + self._cleanup_writer() + return noop() + + @property + def ok(self) -> bool: + """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not. + + This is **not** a check for ``200 OK`` but a check that the response + status is under 400. 
+ """ + return 400 > self.status + + def raise_for_status(self) -> None: + if not self.ok: + # reason should always be not None for a started response + assert self.reason is not None + self.release() + raise ClientResponseError( + self.request_info, + self.history, + status=self.status, + message=self.reason, + headers=self.headers, + ) + + def _cleanup_writer(self) -> None: + if self._writer is not None: + self._writer.cancel() + self._writer = None + self._session = None + + def _notify_content(self) -> None: + content = self.content + if content and content.exception() is None: + content.set_exception(ClientConnectionError("Connection closed")) + self._released = True + + async def wait_for_close(self) -> None: + if self._writer is not None: + try: + await self._writer + finally: + self._writer = None + self.release() + + async def read(self) -> bytes: + """Read response payload.""" + if self._body is None: + try: + self._body = await self.content.read() + for trace in self._traces: + await trace.send_response_chunk_received( + self.method, self.url, self._body + ) + except BaseException: + self.close() + raise + elif self._released: + raise ClientConnectionError("Connection closed") + + return self._body # type: ignore[no-any-return] + + def get_encoding(self) -> str: + ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() + mimetype = helpers.parse_mimetype(ctype) + + encoding = mimetype.parameters.get("charset") + if encoding: + try: + codecs.lookup(encoding) + except LookupError: + encoding = None + if not encoding: + if mimetype.type == "application" and ( + mimetype.subtype == "json" or mimetype.subtype == "rdap" + ): + # RFC 7159 states that the default encoding is UTF-8. + # RFC 7483 defines application/rdap+json + encoding = "utf-8" + elif self._body is None: + raise RuntimeError( + "Cannot guess the encoding of " "a not yet read body" + ) + else: + encoding = chardet.detect(self._body)["encoding"] + if not encoding: + encoding = "utf-8" + + return encoding + + async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: + """Read response payload and decode.""" + if self._body is None: + await self.read() + + if encoding is None: + encoding = self.get_encoding() + + return self._body.decode( # type: ignore[no-any-return,union-attr] + encoding, errors=errors + ) + + async def json( + self, + *, + encoding: Optional[str] = None, + loads: JSONDecoder = DEFAULT_JSON_DECODER, + content_type: Optional[str] = "application/json", + ) -> Any: + """Read and decodes JSON response.""" + if self._body is None: + await self.read() + + if content_type: + ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() + if not _is_expected_content_type(ctype, content_type): + raise ContentTypeError( + self.request_info, + self.history, + message=( + "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype + ), + headers=self.headers, + ) + + stripped = self._body.strip() # type: ignore[union-attr] + if not stripped: + return None + + if encoding is None: + encoding = self.get_encoding() + + return loads(stripped.decode(encoding)) + + async def __aenter__(self) -> "ClientResponse": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + # similar to _RequestContextManager, we do not need to check + # for exceptions, response object can close connection + # if state is broken + self.release() diff --git 
a/venv/lib/python3.10/site-packages/aiohttp/client_ws.py b/venv/lib/python3.10/site-packages/aiohttp/client_ws.py new file mode 100644 index 0000000..9a8ba84 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/client_ws.py @@ -0,0 +1,300 @@ +"""WebSocket client for asyncio.""" + +import asyncio +from typing import Any, Optional, cast + +import async_timeout + +from .client_exceptions import ClientError +from .client_reqrep import ClientResponse +from .helpers import call_later, set_result +from .http import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WebSocketError, + WSCloseCode, + WSMessage, + WSMsgType, +) +from .http_websocket import WebSocketWriter # WSMessage +from .streams import EofStream, FlowControlDataQueue +from .typedefs import ( + DEFAULT_JSON_DECODER, + DEFAULT_JSON_ENCODER, + JSONDecoder, + JSONEncoder, +) + + +class ClientWebSocketResponse: + def __init__( + self, + reader: "FlowControlDataQueue[WSMessage]", + writer: WebSocketWriter, + protocol: Optional[str], + response: ClientResponse, + timeout: float, + autoclose: bool, + autoping: bool, + loop: asyncio.AbstractEventLoop, + *, + receive_timeout: Optional[float] = None, + heartbeat: Optional[float] = None, + compress: int = 0, + client_notakeover: bool = False, + ) -> None: + self._response = response + self._conn = response.connection + + self._writer = writer + self._reader = reader + self._protocol = protocol + self._closed = False + self._closing = False + self._close_code: Optional[int] = None + self._timeout = timeout + self._receive_timeout = receive_timeout + self._autoclose = autoclose + self._autoping = autoping + self._heartbeat = heartbeat + self._heartbeat_cb: Optional[asyncio.TimerHandle] = None + if heartbeat is not None: + self._pong_heartbeat = heartbeat / 2.0 + self._pong_response_cb: Optional[asyncio.TimerHandle] = None + self._loop = loop + self._waiting: Optional[asyncio.Future[bool]] = None + self._exception: Optional[BaseException] = None + self._compress = compress + self._client_notakeover = client_notakeover + + self._reset_heartbeat() + + def _cancel_heartbeat(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None + + if self._heartbeat_cb is not None: + self._heartbeat_cb.cancel() + self._heartbeat_cb = None + + def _reset_heartbeat(self) -> None: + self._cancel_heartbeat() + + if self._heartbeat is not None: + self._heartbeat_cb = call_later( + self._send_heartbeat, self._heartbeat, self._loop + ) + + def _send_heartbeat(self) -> None: + if self._heartbeat is not None and not self._closed: + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. 
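+            # (Editor's note: heartbeats are opt-in from the public API. A
+            # minimal usage sketch, assuming a reachable WS endpoint; the URL
+            # is illustrative:
+            #
+            #     async with aiohttp.ClientSession() as session:
+            #         async with session.ws_connect(
+            #             "wss://example.org/ws", heartbeat=30.0
+            #         ) as ws:
+            #             await ws.send_str("hello")
+            #             msg = await ws.receive()
+            #
+            # With heartbeat=30.0 a PING is scheduled every 30 seconds, and
+            # _pong_not_received() drops the connection if no PONG arrives
+            # within half that interval (see _pong_heartbeat above).)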
+ self._loop.create_task(self._writer.ping()) + + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = call_later( + self._pong_not_received, self._pong_heartbeat, self._loop + ) + + def _pong_not_received(self) -> None: + if not self._closed: + self._closed = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = asyncio.TimeoutError() + self._response.close() + + @property + def closed(self) -> bool: + return self._closed + + @property + def close_code(self) -> Optional[int]: + return self._close_code + + @property + def protocol(self) -> Optional[str]: + return self._protocol + + @property + def compress(self) -> int: + return self._compress + + @property + def client_notakeover(self) -> bool: + return self._client_notakeover + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """extra info from connection transport""" + conn = self._response.connection + if conn is None: + return default + transport = conn.transport + if transport is None: + return default + return transport.get_extra_info(name, default) + + def exception(self) -> Optional[BaseException]: + return self._exception + + async def ping(self, message: bytes = b"") -> None: + await self._writer.ping(message) + + async def pong(self, message: bytes = b"") -> None: + await self._writer.pong(message) + + async def send_str(self, data: str, compress: Optional[int] = None) -> None: + if not isinstance(data, str): + raise TypeError("data argument must be str (%r)" % type(data)) + await self._writer.send(data, binary=False, compress=compress) + + async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError("data argument must be byte-ish (%r)" % type(data)) + await self._writer.send(data, binary=True, compress=compress) + + async def send_json( + self, + data: Any, + compress: Optional[int] = None, + *, + dumps: JSONEncoder = DEFAULT_JSON_ENCODER, + ) -> None: + await self.send_str(dumps(data), compress=compress) + + async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: + # we need to break `receive()` cycle first, + # `close()` may be called from different task + if self._waiting is not None and not self._closed: + self._reader.feed_data(WS_CLOSING_MESSAGE, 0) + await self._waiting + + if not self._closed: + self._cancel_heartbeat() + self._closed = True + try: + await self._writer.close(code, message) + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._response.close() + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + self._response.close() + return True + + if self._closing: + self._response.close() + return True + + while True: + try: + async with async_timeout.timeout(self._timeout): + msg = await self._reader.read() + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._response.close() + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + self._response.close() + return True + + if msg.type == WSMsgType.CLOSE: + self._close_code = msg.data + self._response.close() + return True + else: + return False + + async def receive(self, timeout: Optional[float] = None) -> WSMessage: + while True: + if self._waiting is not None: + raise RuntimeError("Concurrent call to receive() is not allowed") + + if self._closed: + return 
WS_CLOSED_MESSAGE + elif self._closing: + await self.close() + return WS_CLOSED_MESSAGE + + try: + self._waiting = self._loop.create_future() + try: + async with async_timeout.timeout(timeout or self._receive_timeout): + msg = await self._reader.read() + self._reset_heartbeat() + finally: + waiter = self._waiting + self._waiting = None + set_result(waiter, True) + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except EofStream: + self._close_code = WSCloseCode.OK + await self.close() + return WSMessage(WSMsgType.CLOSED, None, None) + except ClientError: + self._closed = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + return WS_CLOSED_MESSAGE + except WebSocketError as exc: + self._close_code = exc.code + await self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except Exception as exc: + self._exception = exc + self._closing = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + await self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type == WSMsgType.CLOSE: + self._closing = True + self._close_code = msg.data + if not self._closed and self._autoclose: + await self.close() + elif msg.type == WSMsgType.CLOSING: + self._closing = True + elif msg.type == WSMsgType.PING and self._autoping: + await self.pong(msg.data) + continue + elif msg.type == WSMsgType.PONG and self._autoping: + continue + + return msg + + async def receive_str(self, *, timeout: Optional[float] = None) -> str: + msg = await self.receive(timeout) + if msg.type != WSMsgType.TEXT: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str") + return cast(str, msg.data) + + async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: + msg = await self.receive(timeout) + if msg.type != WSMsgType.BINARY: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") + return cast(bytes, msg.data) + + async def receive_json( + self, + *, + loads: JSONDecoder = DEFAULT_JSON_DECODER, + timeout: Optional[float] = None, + ) -> Any: + data = await self.receive_str(timeout=timeout) + return loads(data) + + def __aiter__(self) -> "ClientWebSocketResponse": + return self + + async def __anext__(self) -> WSMessage: + msg = await self.receive() + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): + raise StopAsyncIteration + return msg diff --git a/venv/lib/python3.10/site-packages/aiohttp/connector.py b/venv/lib/python3.10/site-packages/aiohttp/connector.py new file mode 100644 index 0000000..bf40689 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/connector.py @@ -0,0 +1,1453 @@ +import asyncio +import functools +import random +import sys +import traceback +import warnings +from collections import defaultdict, deque +from contextlib import suppress +from http.cookies import SimpleCookie +from itertools import cycle, islice +from time import monotonic +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + DefaultDict, + Dict, + Iterator, + List, + Optional, + Set, + Tuple, + Type, + Union, + cast, +) + +import attr + +from . 
import hdrs, helpers +from .abc import AbstractResolver +from .client_exceptions import ( + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientHttpProxyError, + ClientProxyConnectionError, + ServerFingerprintMismatch, + UnixClientConnectorError, + cert_errors, + ssl_errors, +) +from .client_proto import ResponseHandler +from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params +from .helpers import ( + PY_36, + ceil_timeout, + get_running_loop, + is_ip_address, + noop, + sentinel, +) +from .http import RESPONSES +from .locks import EventResultOrError +from .resolver import DefaultResolver + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + + +__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") + + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientTimeout + from .client_reqrep import ConnectionKey + from .tracing import Trace + + +class _DeprecationWaiter: + __slots__ = ("_awaitable", "_awaited") + + def __init__(self, awaitable: Awaitable[Any]) -> None: + self._awaitable = awaitable + self._awaited = False + + def __await__(self) -> Any: + self._awaited = True + return self._awaitable.__await__() + + def __del__(self) -> None: + if not self._awaited: + warnings.warn( + "Connector.close() is a coroutine, " + "please use await connector.close()", + DeprecationWarning, + ) + + +class Connection: + + _source_traceback = None + _transport = None + + def __init__( + self, + connector: "BaseConnector", + key: "ConnectionKey", + protocol: ResponseHandler, + loop: asyncio.AbstractEventLoop, + ) -> None: + self._key = key + self._connector = connector + self._loop = loop + self._protocol: Optional[ResponseHandler] = protocol + self._callbacks: List[Callable[[], None]] = [] + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + def __repr__(self) -> str: + return f"Connection<{self._key}>" + + def __del__(self, _warnings: Any = warnings) -> None: + if self._protocol is not None: + if PY_36: + kwargs = {"source": self} + else: + kwargs = {} + _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs) + if self._loop.is_closed(): + return + + self._connector._release(self._key, self._protocol, should_close=True) + + context = {"client_connection": self, "message": "Unclosed connection"} + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + @property + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn( + "connector.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def protocol(self) -> Optional[ResponseHandler]: + return self._protocol + + def add_callback(self, callback: Callable[[], None]) -> None: + if callback is not None: + self._callbacks.append(callback) + + def _notify_release(self) -> None: + callbacks, self._callbacks = self._callbacks[:], [] + + for cb in callbacks: + with suppress(Exception): + cb() + + def close(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release(self._key, self._protocol, should_close=True) + self._protocol = 
None + + def release(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release( + self._key, self._protocol, should_close=self._protocol.should_close + ) + self._protocol = None + + @property + def closed(self) -> bool: + return self._protocol is None or not self._protocol.is_connected() + + +class _TransportPlaceholder: + """placeholder for BaseConnector.connect function""" + + def close(self) -> None: + pass + + +class BaseConnector: + """Base connector class. + + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + loop - Optional event loop. + """ + + _closed = True # prevent AttributeError in __del__ if ctor was failed + _source_traceback = None + + # abort transport after 2 seconds (cleanup broken connections) + _cleanup_closed_period = 2.0 + + def __init__( + self, + *, + keepalive_timeout: Union[object, None, float] = sentinel, + force_close: bool = False, + limit: int = 100, + limit_per_host: int = 0, + enable_cleanup_closed: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + + if force_close: + if keepalive_timeout is not None and keepalive_timeout is not sentinel: + raise ValueError( + "keepalive_timeout cannot " "be set if force_close is True" + ) + else: + if keepalive_timeout is sentinel: + keepalive_timeout = 15.0 + + loop = get_running_loop(loop) + + self._closed = False + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {} + self._limit = limit + self._limit_per_host = limit_per_host + self._acquired: Set[ResponseHandler] = set() + self._acquired_per_host: DefaultDict[ + ConnectionKey, Set[ResponseHandler] + ] = defaultdict(set) + self._keepalive_timeout = cast(float, keepalive_timeout) + self._force_close = force_close + + # {host_key: FIFO list of waiters} + self._waiters = defaultdict(deque) # type: ignore[var-annotated] + + self._loop = loop + self._factory = functools.partial(ResponseHandler, loop=loop) + + self.cookies: SimpleCookie[str] = SimpleCookie() + + # start keep-alive connection cleanup task + self._cleanup_handle: Optional[asyncio.TimerHandle] = None + + # start cleanup closed transports task + self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None + self._cleanup_closed_disabled = not enable_cleanup_closed + self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = [] + self._cleanup_closed() + + def __del__(self, _warnings: Any = warnings) -> None: + if self._closed: + return + if not self._conns: + return + + conns = [repr(c) for c in self._conns.values()] + + self._close() + + if PY_36: + kwargs = {"source": self} + else: + kwargs = {} + _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs) + context = { + "connector": self, + "connections": conns, + "message": "Unclosed connector", + } + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def __enter__(self) -> "BaseConnector": + warnings.warn( + '"with Connector():" is deprecated, ' + 'use "async with Connector():" instead', + DeprecationWarning, + ) + return self + + def 
__exit__(self, *exc: Any) -> None: + self._close() + + async def __aenter__(self) -> "BaseConnector": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + exc_traceback: Optional[TracebackType] = None, + ) -> None: + await self.close() + + @property + def force_close(self) -> bool: + """Ultimately close connection on releasing if True.""" + return self._force_close + + @property + def limit(self) -> int: + """The total number of simultaneous connections. + + If limit is 0 the connector has no limit. + The default limit size is 100. + """ + return self._limit + + @property + def limit_per_host(self) -> int: + """The limit for simultaneous connections to the same endpoint. + + Endpoints are the same if they have an equal + (host, port, is_ssl) triple. + """ + return self._limit_per_host + + def _cleanup(self) -> None: + """Cleanup unused transports.""" + if self._cleanup_handle: + self._cleanup_handle.cancel() + # _cleanup_handle should be unset, otherwise _release() will not + # recreate it ever! + self._cleanup_handle = None + + now = self._loop.time() + timeout = self._keepalive_timeout + + if self._conns: + connections = {} + deadline = now - timeout + for key, conns in self._conns.items(): + alive = [] + for proto, use_time in conns: + if proto.is_connected(): + if use_time - deadline < 0: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + alive.append((proto, use_time)) + else: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + + if alive: + connections[key] = alive + + self._conns = connections + + if self._conns: + self._cleanup_handle = helpers.weakref_handle( + self, "_cleanup", timeout, self._loop + ) + + def _drop_acquired_per_host( + self, key: "ConnectionKey", val: ResponseHandler + ) -> None: + acquired_per_host = self._acquired_per_host + if key not in acquired_per_host: + return + conns = acquired_per_host[key] + conns.remove(val) + if not conns: + del self._acquired_per_host[key] + + def _cleanup_closed(self) -> None: + """Double confirmation for transport close. + + Some broken ssl servers may leave socket open without proper close.
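+ + While enable_cleanup_closed is set, this abort pass reschedules itself + every _cleanup_closed_period seconds (2.0 by default).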
+ """ + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + self._cleanup_closed_transports = [] + + if not self._cleanup_closed_disabled: + self._cleanup_closed_handle = helpers.weakref_handle( + self, "_cleanup_closed", self._cleanup_closed_period, self._loop + ) + + def close(self) -> Awaitable[None]: + """Close all opened transports.""" + self._close() + return _DeprecationWaiter(noop()) + + def _close(self) -> None: + if self._closed: + return + + self._closed = True + + try: + if self._loop.is_closed(): + return + + # cancel cleanup task + if self._cleanup_handle: + self._cleanup_handle.cancel() + + # cancel cleanup close task + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for data in self._conns.values(): + for proto, t0 in data: + proto.close() + + for proto in self._acquired: + proto.close() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + finally: + self._conns.clear() + self._acquired.clear() + self._waiters.clear() + self._cleanup_handle = None + self._cleanup_closed_transports.clear() + self._cleanup_closed_handle = None + + @property + def closed(self) -> bool: + """Is connector closed. + + A readonly property. + """ + return self._closed + + def _available_connections(self, key: "ConnectionKey") -> int: + """ + Return number of available connections. + + The limit, limit_per_host and the connection key are taken into account. + + If it returns less than 1 means that there are no connections + available. + """ + if self._limit: + # total calc available connections + available = self._limit - len(self._acquired) + + # check limit per host + if ( + self._limit_per_host + and available > 0 + and key in self._acquired_per_host + ): + acquired = self._acquired_per_host.get(key) + assert acquired is not None + available = self._limit_per_host - len(acquired) + + elif self._limit_per_host and key in self._acquired_per_host: + # check limit per host + acquired = self._acquired_per_host.get(key) + assert acquired is not None + available = self._limit_per_host - len(acquired) + else: + available = 1 + + return available + + async def connect( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> Connection: + """Get from pool or create new connection.""" + key = req.connection_key + available = self._available_connections(key) + + # Wait if there are no available connections or if there are/were + # waiters (i.e. don't steal connection from a waiter about to wake up) + if available <= 0 or key in self._waiters: + fut = self._loop.create_future() + + # This connection will now count towards the limit. 
+            self._waiters[key].append(fut) + + if traces: + for trace in traces: + await trace.send_connection_queued_start() + + try: + await fut + except BaseException as e: + if key in self._waiters: + # remove a waiter even if it was cancelled, normally it's + # removed when it's notified + try: + self._waiters[key].remove(fut) + except ValueError: # fut may no longer be in list + pass + + raise e + finally: + if key in self._waiters and not self._waiters[key]: + del self._waiters[key] + + if traces: + for trace in traces: + await trace.send_connection_queued_end() + + proto = self._get(key) + if proto is None: + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + self._acquired_per_host[key].add(placeholder) + + if traces: + for trace in traces: + await trace.send_connection_create_start() + + try: + proto = await self._create_connection(req, traces, timeout) + if self._closed: + proto.close() + raise ClientConnectionError("Connector is closed.") + except BaseException: + if not self._closed: + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + self._release_waiter() + raise + else: + if not self._closed: + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + + if traces: + for trace in traces: + await trace.send_connection_create_end() + else: + if traces: + # Acquire the connection to prevent race conditions with limits + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + self._acquired_per_host[key].add(placeholder) + for trace in traces: + await trace.send_connection_reuseconn() + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + + self._acquired.add(proto) + self._acquired_per_host[key].add(proto) + return Connection(self, key, proto, self._loop) + + def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: + try: + conns = self._conns[key] + except KeyError: + return None + + t1 = self._loop.time() + while conns: + proto, t0 = conns.pop() + if proto.is_connected(): + if t1 - t0 > self._keepalive_timeout: + transport = proto.transport + proto.close() + # only for SSL transports + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + if not conns: + # The very last connection was reclaimed: drop the key + del self._conns[key] + return proto + else: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + + # No more connections: drop the key + del self._conns[key] + return None + + def _release_waiter(self) -> None: + """ + Iterates over all waiters until one to be released is found. + + The one to be released is not finished and + belongs to a host that has available connections. + """ + if not self._waiters: + return + + # Shuffling the keys below avoids iterating over the waiters + # in the same order on every call.
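+        # Only keys that still have capacity are considered; the first waiter
+        # that is not already done is woken with set_result(None).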
queues = list(self._waiters.keys()) + random.shuffle(queues) + + for key in queues: + if self._available_connections(key) < 1: + continue + + waiters = self._waiters[key] + while waiters: + waiter = waiters.popleft() + if not waiter.done(): + waiter.set_result(None) + return + + def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None: + if self._closed: + # acquired connection is already released on connector closing + return + + try: + self._acquired.remove(proto) + self._drop_acquired_per_host(key, proto) + except KeyError: # pragma: no cover + # this may be the result of a nondeterministic order of object + # finalization during garbage collection. + pass + else: + self._release_waiter() + + def _release( + self, + key: "ConnectionKey", + protocol: ResponseHandler, + *, + should_close: bool = False, + ) -> None: + if self._closed: + # acquired connection is already released on connector closing + return + + self._release_acquired(key, protocol) + + if self._force_close: + should_close = True + + if should_close or protocol.should_close: + transport = protocol.transport + protocol.close() + + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + conns = self._conns.get(key) + if conns is None: + conns = self._conns[key] = [] + conns.append((protocol, self._loop.time())) + + if self._cleanup_handle is None: + self._cleanup_handle = helpers.weakref_handle( + self, "_cleanup", self._keepalive_timeout, self._loop + ) + + async def _create_connection( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + raise NotImplementedError() + + +class _DNSCacheTable: + def __init__(self, ttl: Optional[float] = None) -> None: + self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {} + self._timestamps: Dict[Tuple[str, int], float] = {} + self._ttl = ttl + + def __contains__(self, host: object) -> bool: + return host in self._addrs_rr + + def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None: + self._addrs_rr[key] = (cycle(addrs), len(addrs)) + + if self._ttl: + self._timestamps[key] = monotonic() + + def remove(self, key: Tuple[str, int]) -> None: + self._addrs_rr.pop(key, None) + + if self._ttl: + self._timestamps.pop(key, None) + + def clear(self) -> None: + self._addrs_rr.clear() + self._timestamps.clear() + + def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]: + loop, length = self._addrs_rr[key] + addrs = list(islice(loop, length)) + # Consume one more element to shift internal state of `cycle` + next(loop) + return addrs + + def expired(self, key: Tuple[str, int]) -> bool: + if self._ttl is None: + return False + + return self._timestamps[key] + self._ttl < monotonic() + + +class TCPConnector(BaseConnector): + """TCP connector. + + verify_ssl - Set to True to check ssl certificates. + fingerprint - Pass the binary sha256 + digest of the expected certificate in DER format to verify + that the certificate the server presents matches. See also + https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning + resolver - Enable DNS lookups and use this + resolver + use_dns_cache - Use memory cache for DNS lookups. + ttl_dns_cache - Max seconds to cache a DNS entry; None means forever. + family - socket address family + local_addr - local tuple of (host, port) to bind socket to + + keepalive_timeout - (optional) Keep-alive timeout.
+ force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + loop - Optional event loop. + """ + + def __init__( + self, + *, + verify_ssl: bool = True, + fingerprint: Optional[bytes] = None, + use_dns_cache: bool = True, + ttl_dns_cache: Optional[int] = 10, + family: int = 0, + ssl_context: Optional[SSLContext] = None, + ssl: Union[None, bool, Fingerprint, SSLContext] = None, + local_addr: Optional[Tuple[str, int]] = None, + resolver: Optional[AbstractResolver] = None, + keepalive_timeout: Union[None, float, object] = sentinel, + force_close: bool = False, + limit: int = 100, + limit_per_host: int = 0, + enable_cleanup_closed: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + ): + super().__init__( + keepalive_timeout=keepalive_timeout, + force_close=force_close, + limit=limit, + limit_per_host=limit_per_host, + enable_cleanup_closed=enable_cleanup_closed, + loop=loop, + ) + + self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + if resolver is None: + resolver = DefaultResolver(loop=self._loop) + self._resolver = resolver + + self._use_dns_cache = use_dns_cache + self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) + self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {} + self._family = family + self._local_addr = local_addr + + def close(self) -> Awaitable[None]: + """Close all ongoing DNS calls.""" + for ev in self._throttle_dns_events.values(): + ev.cancel() + + return super().close() + + @property + def family(self) -> int: + """Socket family like AF_INET.""" + return self._family + + @property + def use_dns_cache(self) -> bool: + """True if local DNS caching is enabled.""" + return self._use_dns_cache + + def clear_dns_cache( + self, host: Optional[str] = None, port: Optional[int] = None + ) -> None: + """Remove specified host/port or clear all dns local cache.""" + if host is not None and port is not None: + self._cached_hosts.remove((host, port)) + elif host is not None or port is not None: + raise ValueError("either both host and port " "or none of them are allowed") + else: + self._cached_hosts.clear() + + async def _resolve_host( + self, host: str, port: int, traces: Optional[List["Trace"]] = None + ) -> List[Dict[str, Any]]: + if is_ip_address(host): + return [ + { + "hostname": host, + "host": host, + "port": port, + "family": self._family, + "proto": 0, + "flags": 0, + } + ] + + if not self._use_dns_cache: + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_start(host) + + res = await self._resolver.resolve(host, port, family=self._family) + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_end(host) + + return res + + key = (host, port) + + if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)): + # get result early, before any await (#4014) + result = self._cached_hosts.next_addrs(key) + + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + return result + + if key in self._throttle_dns_events: + # get event early, before any await (#4014) + event = self._throttle_dns_events[key] + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + await event.wait() + else: + # update dict early, before any await (#4014) + self._throttle_dns_events[key] = 
EventResultOrError(self._loop) + if traces: + for trace in traces: + await trace.send_dns_cache_miss(host) + try: + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_start(host) + + addrs = await self._resolver.resolve(host, port, family=self._family) + if traces: + for trace in traces: + await trace.send_dns_resolvehost_end(host) + + self._cached_hosts.add(key, addrs) + self._throttle_dns_events[key].set() + except BaseException as e: + # any DNS exception, independently of the implementation + # is set for the waiters to raise the same exception. + self._throttle_dns_events[key].set(exc=e) + raise + finally: + self._throttle_dns_events.pop(key) + + return self._cached_hosts.next_addrs(key) + + async def _create_connection( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + """Create connection. + + Has same keyword arguments as BaseEventLoop.create_connection. + """ + if req.proxy: + _, proto = await self._create_proxy_connection(req, traces, timeout) + else: + _, proto = await self._create_direct_connection(req, traces, timeout) + + return proto + + @staticmethod + @functools.lru_cache(None) + def _make_ssl_context(verified: bool) -> SSLContext: + if verified: + return ssl.create_default_context() + else: + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + try: + sslcontext.options |= ssl.OP_NO_COMPRESSION + except AttributeError as attr_err: + warnings.warn( + "{!s}: The Python interpreter is compiled " + "against OpenSSL < 1.0.0. Ref: " + "https://docs.python.org/3/library/ssl.html" + "#ssl.OP_NO_COMPRESSION".format(attr_err), + ) + sslcontext.set_default_verify_paths() + return sslcontext + + def _get_ssl_context(self, req: "ClientRequest") -> Optional[SSLContext]: + """Logic to get the correct SSL context + + 0. if req.ssl is false, return None + + 1. if ssl_context is specified in req, use it + 2. if _ssl_context is specified in self, use it + 3. otherwise: + 1. if verify_ssl is not specified in req, use self.ssl_context + (will generate a default context according to self.verify_ssl) + 2. if verify_ssl is True in req, generate a default SSL context + 3. 
if verify_ssl is False in req, generate a SSL context that + won't verify + """ + if req.is_ssl(): + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + sslcontext = req.ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not None: + # not verified or fingerprinted + return self._make_ssl_context(False) + sslcontext = self._ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not None: + # not verified or fingerprinted + return self._make_ssl_context(False) + return self._make_ssl_context(True) + else: + return None + + def _get_fingerprint(self, req: "ClientRequest") -> Optional["Fingerprint"]: + ret = req.ssl + if isinstance(ret, Fingerprint): + return ret + ret = self._ssl + if isinstance(ret, Fingerprint): + return ret + return None + + async def _wrap_create_connection( + self, + *args: Any, + req: "ClientRequest", + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + **kwargs: Any, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + try: + async with ceil_timeout(timeout.sock_connect): + return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + + def _fail_on_no_start_tls(self, req: "ClientRequest") -> None: + """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``. + + One case is that :py:meth:`asyncio.loop.start_tls` is not yet + implemented under Python 3.6. It is necessary for TLS-in-TLS so + that it is possible to send HTTPS queries through HTTPS proxies. + + This doesn't affect regular HTTP requests, though. + """ + if not req.is_ssl(): + return + + proxy_url = req.proxy + assert proxy_url is not None + if proxy_url.scheme != "https": + return + + self._check_loop_for_start_tls() + + def _check_loop_for_start_tls(self) -> None: + try: + self._loop.start_tls + except AttributeError as attr_exc: + raise RuntimeError( + "An HTTPS request is being sent through an HTTPS proxy. " + "This needs support for TLS in TLS but it is not implemented " + "in your runtime for the stdlib asyncio.\n\n" + "Please upgrade to Python 3.7 or higher. For more details, " + "please see:\n" + "* https://bugs.python.org/issue37179\n" + "* https://github.com/python/cpython/pull/28073\n" + "* https://docs.aiohttp.org/en/stable/" + "client_advanced.html#proxy-support\n" + "* https://github.com/aio-libs/aiohttp/discussions/6044\n", + ) from attr_exc + + def _loop_supports_start_tls(self) -> bool: + try: + self._check_loop_for_start_tls() + except RuntimeError: + return False + else: + return True + + def _warn_about_tls_in_tls( + self, + underlying_transport: asyncio.Transport, + req: "ClientRequest", + ) -> None: + """Issue a warning if the requested URL has HTTPS scheme.""" + if req.request_info.url.scheme != "https": + return + + asyncio_supports_tls_in_tls = getattr( + underlying_transport, + "_start_tls_compatible", + False, + ) + + if asyncio_supports_tls_in_tls: + return + + warnings.warn( + "An HTTPS request is being sent through an HTTPS proxy. " + "This support for TLS in TLS is known to be disabled " + "in the stdlib asyncio. 
This is why you'll probably see " + "an error in the log below.\n\n" + "It is possible to enable it via monkeypatching under " + "Python 3.7 or higher. For more details, see:\n" + "* https://bugs.python.org/issue37179\n" + "* https://github.com/python/cpython/pull/28073\n\n" + "You can temporarily patch this as follows:\n" + "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n" + "* https://github.com/aio-libs/aiohttp/discussions/6044\n", + RuntimeWarning, + source=self, + # Why `4`? At least 3 of the calls in the stack originate + # from the methods in this class. + stacklevel=3, + ) + + async def _start_tls_connection( + self, + underlying_transport: asyncio.Transport, + req: "ClientRequest", + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: + """Wrap the raw TCP transport with TLS.""" + tls_proto = self._factory() # Create a brand new proto for TLS + + # Safety of the `cast()` call here is based on the fact that + # internally `_get_ssl_context()` only returns `None` when + # `req.is_ssl()` evaluates to `False` which is never gonna happen + # in this code path. Of course, it's rather fragile + # maintainability-wise but this is to be solved separately. + sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req)) + + try: + async with ceil_timeout(timeout.sock_connect): + try: + tls_transport = await self._loop.start_tls( + underlying_transport, + tls_proto, + sslcontext, + server_hostname=req.host, + ssl_handshake_timeout=timeout.total, + ) + except BaseException: + # We need to close the underlying transport since + # `start_tls()` probably failed before it had a + # chance to do this: + underlying_transport.close() + raise + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + except TypeError as type_err: + # Example cause looks like this: + # TypeError: transport is not supported by start_tls() + + raise ClientConnectionError( + "Cannot initialize a TLS-in-TLS connection to host " + f"{req.host!s}:{req.port:d} through an underlying connection " + f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} " + f"[{type_err!s}]" + ) from type_err + else: + tls_proto.connection_made( + tls_transport + ) # Kick the state machine of the new TLS protocol + + return tls_transport, tls_proto + + async def _create_direct_connection( + self, + req: "ClientRequest", + traces: List["Trace"], + timeout: "ClientTimeout", + *, + client_error: Type[Exception] = ClientConnectorError, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + sslcontext = self._get_ssl_context(req) + fingerprint = self._get_fingerprint(req) + + host = req.url.raw_host + assert host is not None + port = req.port + assert port is not None + host_resolved = asyncio.ensure_future( + self._resolve_host(host, port, traces=traces), loop=self._loop + ) + try: + # Cancelling this lookup should not cancel the underlying lookup + # or else the cancel event will get broadcast to all the waiters + # across all connections. 
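+            # asyncio.shield() lets the shared host_resolved future keep
+            # running even if this coroutine is cancelled, so concurrent
+            # requests awaiting the same DNS resolution still receive their
+            # result.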
+            hosts = await asyncio.shield(host_resolved) + except asyncio.CancelledError: + + def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: + with suppress(Exception, asyncio.CancelledError): + fut.result() + + host_resolved.add_done_callback(drop_exception) + raise + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + # in the case of a proxy it is not a ClientProxyConnectionError, + # it is a problem of resolving the proxy ip itself + raise ClientConnectorError(req.connection_key, exc) from exc + + last_exc: Optional[Exception] = None + + for hinfo in hosts: + host = hinfo["host"] + port = hinfo["port"] + + try: + transp, proto = await self._wrap_create_connection( + self._factory, + host, + port, + timeout=timeout, + ssl=sslcontext, + family=hinfo["family"], + proto=hinfo["proto"], + flags=hinfo["flags"], + server_hostname=hinfo["hostname"] if sslcontext else None, + local_addr=self._local_addr, + req=req, + client_error=client_error, + ) + except ClientConnectorError as exc: + last_exc = exc + continue + + if req.is_ssl() and fingerprint: + try: + fingerprint.check(transp) + except ServerFingerprintMismatch as exc: + transp.close() + if not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transp) + last_exc = exc + continue + + return transp, proto + else: + assert last_exc is not None + raise last_exc + + async def _create_proxy_connection( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: + self._fail_on_no_start_tls(req) + runtime_has_start_tls = self._loop_supports_start_tls() + + headers: Dict[str, str] = {} + if req.proxy_headers is not None: + headers = req.proxy_headers # type: ignore[assignment] + headers[hdrs.HOST] = req.headers[hdrs.HOST] + + url = req.proxy + assert url is not None + proxy_req = ClientRequest( + hdrs.METH_GET, + url, + headers=headers, + auth=req.proxy_auth, + loop=self._loop, + ssl=req.ssl, + ) + + # create connection to proxy server + transport, proto = await self._create_direct_connection( + proxy_req, [], timeout, client_error=ClientProxyConnectionError + ) + + # Many HTTP proxies have buggy keepalive support. Let's not + # reuse the connection but close it after processing every + # response. + proto.force_close() + + auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) + if auth is not None: + if not req.is_ssl(): + req.headers[hdrs.PROXY_AUTHORIZATION] = auth + else: + proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth + + if req.is_ssl(): + if runtime_has_start_tls: + self._warn_about_tls_in_tls(transport, req) + + # For HTTPS requests over HTTP proxy + # we must notify proxy to tunnel connection + # so we send CONNECT command: + # CONNECT www.python.org:443 HTTP/1.1 + # Host: www.python.org + # + # next we must do TLS handshake and so on + # to do this we must wrap raw socket into secure one + # asyncio handles this perfectly + proxy_req.method = hdrs.METH_CONNECT + proxy_req.url = req.url + key = attr.evolve( + req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None + ) + conn = Connection(self, key, proto, self._loop) + proxy_resp = await proxy_req.send(conn) + try: + protocol = conn._protocol + assert protocol is not None + + # read_until_eof=True will ensure the connection isn't closed + # once the response is received and processed allowing + # START_TLS to work on the connection below.
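+                # The proxy's reply to the CONNECT request is read on this
+                # same connection; any status other than 200 below aborts the
+                # tunnel with ClientHttpProxyError.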
+ protocol.set_response_params(read_until_eof=runtime_has_start_tls) + resp = await proxy_resp.start(conn) + except BaseException: + proxy_resp.close() + conn.close() + raise + else: + conn._protocol = None + conn._transport = None + try: + if resp.status != 200: + message = resp.reason + if message is None: + message = RESPONSES[resp.status][0] + raise ClientHttpProxyError( + proxy_resp.request_info, + resp.history, + status=resp.status, + message=message, + headers=resp.headers, + ) + if not runtime_has_start_tls: + rawsock = transport.get_extra_info("socket", default=None) + if rawsock is None: + raise RuntimeError( + "Transport does not expose socket instance" + ) + # Duplicate the socket, so now we can close proxy transport + rawsock = rawsock.dup() + except BaseException: + # It shouldn't be closed in `finally` because it's fed to + # `loop.start_tls()` and the docs say not to touch it after + # passing there. + transport.close() + raise + finally: + if not runtime_has_start_tls: + transport.close() + + if not runtime_has_start_tls: + # HTTP proxy with support for upgrade to HTTPS + sslcontext = self._get_ssl_context(req) + return await self._wrap_create_connection( + self._factory, + timeout=timeout, + ssl=sslcontext, + sock=rawsock, + server_hostname=req.host, + req=req, + ) + + return await self._start_tls_connection( + # Access the old transport for the last time before it's + # closed and forgotten forever: + transport, + req=req, + timeout=timeout, + ) + finally: + proxy_resp.close() + + return transport, proto + + +class UnixConnector(BaseConnector): + """Unix socket connector. + + path - Unix socket path. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + loop - Optional event loop. + """ + + def __init__( + self, + path: str, + force_close: bool = False, + keepalive_timeout: Union[object, float, None] = sentinel, + limit: int = 100, + limit_per_host: int = 0, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__( + force_close=force_close, + keepalive_timeout=keepalive_timeout, + limit=limit, + limit_per_host=limit_per_host, + loop=loop, + ) + self._path = path + + @property + def path(self) -> str: + """Path to unix socket.""" + return self._path + + async def _create_connection( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + try: + async with ceil_timeout(timeout.sock_connect): + _, proto = await self._loop.create_unix_connection( + self._factory, self._path + ) + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc + + return cast(ResponseHandler, proto) + + +class NamedPipeConnector(BaseConnector): + """Named pipe connector. + + Only supported by the proactor event loop. + See also: https://docs.python.org/3.7/library/asyncio-eventloop.html + + path - Windows named pipe path. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + loop - Optional event loop. 
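+
+    A hypothetical construction (the pipe path here is illustrative only;
+    requires the proactor event loop on Windows)::
+
+        connector = NamedPipeConnector(r"\\.\pipe\example-pipe")
+        session = aiohttp.ClientSession(connector=connector)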
+ """ + + def __init__( + self, + path: str, + force_close: bool = False, + keepalive_timeout: Union[object, float, None] = sentinel, + limit: int = 100, + limit_per_host: int = 0, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__( + force_close=force_close, + keepalive_timeout=keepalive_timeout, + limit=limit, + limit_per_host=limit_per_host, + loop=loop, + ) + if not isinstance( + self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] + ): + raise RuntimeError( + "Named Pipes only available in proactor " "loop under windows" + ) + self._path = path + + @property + def path(self) -> str: + """Path to the named pipe.""" + return self._path + + async def _create_connection( + self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + try: + async with ceil_timeout(timeout.sock_connect): + _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined] # noqa: E501 + self._factory, self._path + ) + # the drain is required so that the connection_made is called + # and transport is set otherwise it is not set before the + # `assert conn.transport is not None` + # in client.py's _request method + await asyncio.sleep(0) + # other option is to manually set transport like + # `proto.transport = trans` + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise ClientConnectorError(req.connection_key, exc) from exc + + return cast(ResponseHandler, proto) diff --git a/venv/lib/python3.10/site-packages/aiohttp/cookiejar.py b/venv/lib/python3.10/site-packages/aiohttp/cookiejar.py new file mode 100644 index 0000000..6c88b47 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/cookiejar.py @@ -0,0 +1,415 @@ +import asyncio +import contextlib +import datetime +import os # noqa +import pathlib +import pickle +import re +from collections import defaultdict +from http.cookies import BaseCookie, Morsel, SimpleCookie +from typing import ( # noqa + DefaultDict, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Set, + Tuple, + Union, + cast, +) + +from yarl import URL + +from .abc import AbstractCookieJar, ClearCookiePredicate +from .helpers import is_ip_address, next_whole_second +from .typedefs import LooseCookies, PathLike, StrOrURL + +__all__ = ("CookieJar", "DummyCookieJar") + + +CookieItem = Union[str, "Morsel[str]"] + + +class CookieJar(AbstractCookieJar): + """Implements cookie storage adhering to RFC 6265.""" + + DATE_TOKENS_RE = re.compile( + r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*" + r"(?P[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)" + ) + + DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})") + + DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})") + + DATE_MONTH_RE = re.compile( + "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)", + re.I, + ) + + DATE_YEAR_RE = re.compile(r"(\d{2,4})") + + MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc) + + MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2**31 - 1) + + def __init__( + self, + *, + unsafe: bool = False, + quote_cookie: bool = True, + treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__(loop=loop) + self._cookies: DefaultDict[Tuple[str, str], SimpleCookie[str]] = defaultdict( + SimpleCookie + ) + self._host_only_cookies: Set[Tuple[str, str]] = set() + self._unsafe = unsafe + self._quote_cookie = quote_cookie + if 
treat_as_secure_origin is None: + treat_as_secure_origin = [] + elif isinstance(treat_as_secure_origin, URL): + treat_as_secure_origin = [treat_as_secure_origin.origin()] + elif isinstance(treat_as_secure_origin, str): + treat_as_secure_origin = [URL(treat_as_secure_origin).origin()] + else: + treat_as_secure_origin = [ + URL(url).origin() if isinstance(url, str) else url.origin() + for url in treat_as_secure_origin + ] + self._treat_as_secure_origin = treat_as_secure_origin + self._next_expiration = next_whole_second() + self._expirations: Dict[Tuple[str, str, str], datetime.datetime] = {} + # #4515: datetime.max may not be representable on 32-bit platforms + self._max_time = self.MAX_TIME + try: + self._max_time.timestamp() + except OverflowError: + self._max_time = self.MAX_32BIT_TIME + + def save(self, file_path: PathLike) -> None: + file_path = pathlib.Path(file_path) + with file_path.open(mode="wb") as f: + pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL) + + def load(self, file_path: PathLike) -> None: + file_path = pathlib.Path(file_path) + with file_path.open(mode="rb") as f: + self._cookies = pickle.load(f) + + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + if predicate is None: + self._next_expiration = next_whole_second() + self._cookies.clear() + self._host_only_cookies.clear() + self._expirations.clear() + return + + to_del = [] + now = datetime.datetime.now(datetime.timezone.utc) + for (domain, path), cookie in self._cookies.items(): + for name, morsel in cookie.items(): + key = (domain, path, name) + if ( + key in self._expirations and self._expirations[key] <= now + ) or predicate(morsel): + to_del.append(key) + + for domain, path, name in to_del: + self._host_only_cookies.discard((domain, name)) + key = (domain, path, name) + if key in self._expirations: + del self._expirations[(domain, path, name)] + self._cookies[(domain, path)].pop(name, None) + + next_expiration = min(self._expirations.values(), default=self._max_time) + try: + self._next_expiration = next_expiration.replace( + microsecond=0 + ) + datetime.timedelta(seconds=1) + except OverflowError: + self._next_expiration = self._max_time + + def clear_domain(self, domain: str) -> None: + self.clear(lambda x: self._is_domain_match(domain, x["domain"])) + + def __iter__(self) -> "Iterator[Morsel[str]]": + self._do_expiration() + for val in self._cookies.values(): + yield from val.values() + + def __len__(self) -> int: + return sum(1 for i in self) + + def _do_expiration(self) -> None: + self.clear(lambda x: False) + + def _expire_cookie( + self, when: datetime.datetime, domain: str, path: str, name: str + ) -> None: + self._next_expiration = min(self._next_expiration, when) + self._expirations[(domain, path, name)] = when + + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + """Update cookies.""" + hostname = response_url.raw_host + + if not self._unsafe and is_ip_address(hostname): + # Don't accept cookies from IPs + return + + if isinstance(cookies, Mapping): + cookies = cookies.items() + + for name, cookie in cookies: + if not isinstance(cookie, Morsel): + tmp: SimpleCookie[str] = SimpleCookie() + tmp[name] = cookie # type: ignore[assignment] + cookie = tmp[name] + + domain = cookie["domain"] + + # ignore domains with trailing dots + if domain.endswith("."): + domain = "" + del cookie["domain"] + + if not domain and hostname is not None: + # Set the cookie's domain to the response hostname + # and set its host-only-flag + 
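+                # (host-only cookies are later matched by exact hostname in
+                # filter_cookies() rather than by the domain-suffix rule)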
self._host_only_cookies.add((hostname, name)) + domain = cookie["domain"] = hostname + + if domain.startswith("."): + # Remove leading dot + domain = domain[1:] + cookie["domain"] = domain + + if hostname and not self._is_domain_match(domain, hostname): + # Setting cookies for different domains is not allowed + continue + + path = cookie["path"] + if not path or not path.startswith("/"): + # Set the cookie's path to the response path + path = response_url.path + if not path.startswith("/"): + path = "/" + else: + # Cut everything from the last slash to the end + path = "/" + path[1 : path.rfind("/")] + cookie["path"] = path + + max_age = cookie["max-age"] + if max_age: + try: + delta_seconds = int(max_age) + try: + max_age_expiration = datetime.datetime.now( + datetime.timezone.utc + ) + datetime.timedelta(seconds=delta_seconds) + except OverflowError: + max_age_expiration = self._max_time + self._expire_cookie(max_age_expiration, domain, path, name) + except ValueError: + cookie["max-age"] = "" + + else: + expires = cookie["expires"] + if expires: + expire_time = self._parse_date(expires) + if expire_time: + self._expire_cookie(expire_time, domain, path, name) + else: + cookie["expires"] = "" + + self._cookies[(domain, path)][name] = cookie + + self._do_expiration() + + def filter_cookies( + self, request_url: URL = URL() + ) -> Union["BaseCookie[str]", "SimpleCookie[str]"]: + """Returns this jar's cookies filtered by their attributes.""" + self._do_expiration() + request_url = URL(request_url) + filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = ( + SimpleCookie() if self._quote_cookie else BaseCookie() + ) + hostname = request_url.raw_host or "" + request_origin = URL() + with contextlib.suppress(ValueError): + request_origin = request_url.origin() + + is_not_secure = ( + request_url.scheme not in ("https", "wss") + and request_origin not in self._treat_as_secure_origin + ) + + for cookie in self: + name = cookie.key + domain = cookie["domain"] + + # Send shared cookies + if not domain: + filtered[name] = cookie.value + continue + + if not self._unsafe and is_ip_address(hostname): + continue + + if (domain, name) in self._host_only_cookies: + if domain != hostname: + continue + elif not self._is_domain_match(domain, hostname): + continue + + if not self._is_path_match(request_url.path, cookie["path"]): + continue + + if is_not_secure and cookie["secure"]: + continue + + # It's critical we use the Morsel so the coded_value + # (based on cookie version) is preserved + mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) + mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) + filtered[name] = mrsl_val + + return filtered + + @staticmethod + def _is_domain_match(domain: str, hostname: str) -> bool: + """Implements domain matching adhering to RFC 6265.""" + if hostname == domain: + return True + + if not hostname.endswith(domain): + return False + + non_matching = hostname[: -len(domain)] + + if not non_matching.endswith("."): + return False + + return not is_ip_address(hostname) + + @staticmethod + def _is_path_match(req_path: str, cookie_path: str) -> bool: + """Implements path matching adhering to RFC 6265.""" + if not req_path.startswith("/"): + req_path = "/" + + if req_path == cookie_path: + return True + + if not req_path.startswith(cookie_path): + return False + + if cookie_path.endswith("/"): + return True + + non_matching = req_path[len(cookie_path) :] + + return non_matching.startswith("/") + + @classmethod + def _parse_date(cls, date_str: str) -> 
Optional[datetime.datetime]: + """Implements date string parsing adhering to RFC 6265.""" + if not date_str: + return None + + found_time = False + found_day = False + found_month = False + found_year = False + + hour = minute = second = 0 + day = 0 + month = 0 + year = 0 + + for token_match in cls.DATE_TOKENS_RE.finditer(date_str): + + token = token_match.group("token") + + if not found_time: + time_match = cls.DATE_HMS_TIME_RE.match(token) + if time_match: + found_time = True + hour, minute, second = (int(s) for s in time_match.groups()) + continue + + if not found_day: + day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) + if day_match: + found_day = True + day = int(day_match.group()) + continue + + if not found_month: + month_match = cls.DATE_MONTH_RE.match(token) + if month_match: + found_month = True + assert month_match.lastindex is not None + month = month_match.lastindex + continue + + if not found_year: + year_match = cls.DATE_YEAR_RE.match(token) + if year_match: + found_year = True + year = int(year_match.group()) + + if 70 <= year <= 99: + year += 1900 + elif 0 <= year <= 69: + year += 2000 + + if False in (found_day, found_month, found_year, found_time): + return None + + if not 1 <= day <= 31: + return None + + if year < 1601 or hour > 23 or minute > 59 or second > 59: + return None + + return datetime.datetime( + year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc + ) + + +class DummyCookieJar(AbstractCookieJar): + """Implements a dummy cookie storage. + + It can be used with the ClientSession when no cookie processing is needed. + + """ + + def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + super().__init__(loop=loop) + + def __iter__(self) -> "Iterator[Morsel[str]]": + while False: + yield None + + def __len__(self) -> int: + return 0 + + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + pass + + def clear_domain(self, domain: str) -> None: + pass + + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + pass + + def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": + return SimpleCookie() diff --git a/venv/lib/python3.10/site-packages/aiohttp/formdata.py b/venv/lib/python3.10/site-packages/aiohttp/formdata.py new file mode 100644 index 0000000..e7cd24c --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/formdata.py @@ -0,0 +1,172 @@ +import io +from typing import Any, Iterable, List, Optional +from urllib.parse import urlencode + +from multidict import MultiDict, MultiDictProxy + +from . import hdrs, multipart, payload +from .helpers import guess_filename +from .payload import Payload + +__all__ = ("FormData",) + + +class FormData: + """Helper class for form body generation. + + Supports multipart/form-data and application/x-www-form-urlencoded. 
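+
+    A minimal usage sketch (illustrative; field names, file name and URL are
+    made up, and session is assumed to be an existing aiohttp.ClientSession)::
+
+        form = FormData()
+        form.add_field("comment", "hello")
+        form.add_field("attachment", open("report.bin", "rb"),
+                       filename="report.bin",
+                       content_type="application/octet-stream")
+        await session.post("http://example.com/upload", data=form)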
+ """ + + def __init__( + self, + fields: Iterable[Any] = (), + quote_fields: bool = True, + charset: Optional[str] = None, + ) -> None: + self._writer = multipart.MultipartWriter("form-data") + self._fields: List[Any] = [] + self._is_multipart = False + self._is_processed = False + self._quote_fields = quote_fields + self._charset = charset + + if isinstance(fields, dict): + fields = list(fields.items()) + elif not isinstance(fields, (list, tuple)): + fields = (fields,) + self.add_fields(*fields) + + @property + def is_multipart(self) -> bool: + return self._is_multipart + + def add_field( + self, + name: str, + value: Any, + *, + content_type: Optional[str] = None, + filename: Optional[str] = None, + content_transfer_encoding: Optional[str] = None, + ) -> None: + + if isinstance(value, io.IOBase): + self._is_multipart = True + elif isinstance(value, (bytes, bytearray, memoryview)): + if filename is None and content_transfer_encoding is None: + filename = name + + type_options: MultiDict[str] = MultiDict({"name": name}) + if filename is not None and not isinstance(filename, str): + raise TypeError( + "filename must be an instance of str. " "Got: %s" % filename + ) + if filename is None and isinstance(value, io.IOBase): + filename = guess_filename(value, name) + if filename is not None: + type_options["filename"] = filename + self._is_multipart = True + + headers = {} + if content_type is not None: + if not isinstance(content_type, str): + raise TypeError( + "content_type must be an instance of str. " "Got: %s" % content_type + ) + headers[hdrs.CONTENT_TYPE] = content_type + self._is_multipart = True + if content_transfer_encoding is not None: + if not isinstance(content_transfer_encoding, str): + raise TypeError( + "content_transfer_encoding must be an instance" + " of str. 
Got: %s" % content_transfer_encoding + ) + headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding + self._is_multipart = True + + self._fields.append((type_options, headers, value)) + + def add_fields(self, *fields: Any) -> None: + to_add = list(fields) + + while to_add: + rec = to_add.pop(0) + + if isinstance(rec, io.IOBase): + k = guess_filename(rec, "unknown") + self.add_field(k, rec) # type: ignore[arg-type] + + elif isinstance(rec, (MultiDictProxy, MultiDict)): + to_add.extend(rec.items()) + + elif isinstance(rec, (list, tuple)) and len(rec) == 2: + k, fp = rec + self.add_field(k, fp) # type: ignore[arg-type] + + else: + raise TypeError( + "Only io.IOBase, multidict and (name, file) " + "pairs allowed, use .add_field() for passing " + "more complex parameters, got {!r}".format(rec) + ) + + def _gen_form_urlencoded(self) -> payload.BytesPayload: + # form data (x-www-form-urlencoded) + data = [] + for type_options, _, value in self._fields: + data.append((type_options["name"], value)) + + charset = self._charset if self._charset is not None else "utf-8" + + if charset == "utf-8": + content_type = "application/x-www-form-urlencoded" + else: + content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset + + return payload.BytesPayload( + urlencode(data, doseq=True, encoding=charset).encode(), + content_type=content_type, + ) + + def _gen_form_data(self) -> multipart.MultipartWriter: + """Encode a list of fields using the multipart/form-data MIME format""" + if self._is_processed: + raise RuntimeError("Form data has been processed already") + for dispparams, headers, value in self._fields: + try: + if hdrs.CONTENT_TYPE in headers: + part = payload.get_payload( + value, + content_type=headers[hdrs.CONTENT_TYPE], + headers=headers, + encoding=self._charset, + ) + else: + part = payload.get_payload( + value, headers=headers, encoding=self._charset + ) + except Exception as exc: + raise TypeError( + "Can not serialize value type: %r\n " + "headers: %r\n value: %r" % (type(value), headers, value) + ) from exc + + if dispparams: + part.set_content_disposition( + "form-data", quote_fields=self._quote_fields, **dispparams + ) + # FIXME cgi.FieldStorage doesn't likes body parts with + # Content-Length which were sent via chunked transfer encoding + assert part.headers is not None + part.headers.popall(hdrs.CONTENT_LENGTH, None) + + self._writer.append_payload(part) + + self._is_processed = True + return self._writer + + def __call__(self) -> Payload: + if self._is_multipart: + return self._gen_form_data() + else: + return self._gen_form_urlencoded() diff --git a/venv/lib/python3.10/site-packages/aiohttp/hdrs.py b/venv/lib/python3.10/site-packages/aiohttp/hdrs.py new file mode 100644 index 0000000..a619f25 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/hdrs.py @@ -0,0 +1,114 @@ +"""HTTP Headers constants.""" + +# After changing the file content call ./tools/gen.py +# to regenerate the headers parser +import sys +from typing import Set + +from multidict import istr + +if sys.version_info >= (3, 8): + from typing import Final +else: + from typing_extensions import Final + +METH_ANY: Final[str] = "*" +METH_CONNECT: Final[str] = "CONNECT" +METH_HEAD: Final[str] = "HEAD" +METH_GET: Final[str] = "GET" +METH_DELETE: Final[str] = "DELETE" +METH_OPTIONS: Final[str] = "OPTIONS" +METH_PATCH: Final[str] = "PATCH" +METH_POST: Final[str] = "POST" +METH_PUT: Final[str] = "PUT" +METH_TRACE: Final[str] = "TRACE" + +METH_ALL: Final[Set[str]] = { + METH_CONNECT, + METH_HEAD, 
+ METH_GET, + METH_DELETE, + METH_OPTIONS, + METH_PATCH, + METH_POST, + METH_PUT, + METH_TRACE, +} + +ACCEPT: Final[istr] = istr("Accept") +ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset") +ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding") +ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language") +ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges") +ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age") +ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials") +ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers") +ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods") +ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin") +ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers") +ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers") +ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method") +AGE: Final[istr] = istr("Age") +ALLOW: Final[istr] = istr("Allow") +AUTHORIZATION: Final[istr] = istr("Authorization") +CACHE_CONTROL: Final[istr] = istr("Cache-Control") +CONNECTION: Final[istr] = istr("Connection") +CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition") +CONTENT_ENCODING: Final[istr] = istr("Content-Encoding") +CONTENT_LANGUAGE: Final[istr] = istr("Content-Language") +CONTENT_LENGTH: Final[istr] = istr("Content-Length") +CONTENT_LOCATION: Final[istr] = istr("Content-Location") +CONTENT_MD5: Final[istr] = istr("Content-MD5") +CONTENT_RANGE: Final[istr] = istr("Content-Range") +CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding") +CONTENT_TYPE: Final[istr] = istr("Content-Type") +COOKIE: Final[istr] = istr("Cookie") +DATE: Final[istr] = istr("Date") +DESTINATION: Final[istr] = istr("Destination") +DIGEST: Final[istr] = istr("Digest") +ETAG: Final[istr] = istr("Etag") +EXPECT: Final[istr] = istr("Expect") +EXPIRES: Final[istr] = istr("Expires") +FORWARDED: Final[istr] = istr("Forwarded") +FROM: Final[istr] = istr("From") +HOST: Final[istr] = istr("Host") +IF_MATCH: Final[istr] = istr("If-Match") +IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since") +IF_NONE_MATCH: Final[istr] = istr("If-None-Match") +IF_RANGE: Final[istr] = istr("If-Range") +IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since") +KEEP_ALIVE: Final[istr] = istr("Keep-Alive") +LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID") +LAST_MODIFIED: Final[istr] = istr("Last-Modified") +LINK: Final[istr] = istr("Link") +LOCATION: Final[istr] = istr("Location") +MAX_FORWARDS: Final[istr] = istr("Max-Forwards") +ORIGIN: Final[istr] = istr("Origin") +PRAGMA: Final[istr] = istr("Pragma") +PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate") +PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization") +RANGE: Final[istr] = istr("Range") +REFERER: Final[istr] = istr("Referer") +RETRY_AFTER: Final[istr] = istr("Retry-After") +SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept") +SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version") +SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol") +SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions") +SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key") +SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1") +SERVER: Final[istr] = istr("Server") +SET_COOKIE: Final[istr] = istr("Set-Cookie") +TE: Final[istr] = istr("TE") +TRAILER: Final[istr] = istr("Trailer") 
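# Editor's sketch (not vendored code): every constant above is an istr, which
# hashes case-insensitively, so a lookup against multidict's case-insensitive
# mappings matches any header spelling:
#
#     from multidict import CIMultiDict, istr
#     headers = CIMultiDict({"content-type": "application/json"})
#     assert headers[istr("Content-Type")] == "application/json"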
+TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding") +UPGRADE: Final[istr] = istr("Upgrade") +URI: Final[istr] = istr("URI") +USER_AGENT: Final[istr] = istr("User-Agent") +VARY: Final[istr] = istr("Vary") +VIA: Final[istr] = istr("Via") +WANT_DIGEST: Final[istr] = istr("Want-Digest") +WARNING: Final[istr] = istr("Warning") +WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate") +X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For") +X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host") +X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto") diff --git a/venv/lib/python3.10/site-packages/aiohttp/helpers.py b/venv/lib/python3.10/site-packages/aiohttp/helpers.py new file mode 100644 index 0000000..874ab1a --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/helpers.py @@ -0,0 +1,878 @@ +"""Various helper functions""" + +import asyncio +import base64 +import binascii +import datetime +import functools +import inspect +import netrc +import os +import platform +import re +import sys +import time +import warnings +import weakref +from collections import namedtuple +from contextlib import suppress +from email.parser import HeaderParser +from email.utils import parsedate +from math import ceil +from pathlib import Path +from types import TracebackType +from typing import ( + Any, + Callable, + ContextManager, + Dict, + Generator, + Generic, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) +from urllib.parse import quote +from urllib.request import getproxies, proxy_bypass + +import async_timeout +import attr +from multidict import MultiDict, MultiDictProxy +from yarl import URL + +from . import hdrs +from .log import client_logger, internal_logger +from .typedefs import PathLike, Protocol # noqa + +__all__ = ("BasicAuth", "ChainMapProxy", "ETag") + +IS_MACOS = platform.system() == "Darwin" +IS_WINDOWS = platform.system() == "Windows" + +PY_36 = sys.version_info >= (3, 6) +PY_37 = sys.version_info >= (3, 7) +PY_38 = sys.version_info >= (3, 8) +PY_310 = sys.version_info >= (3, 10) +PY_311 = sys.version_info >= (3, 11) + +if sys.version_info < (3, 7): + import idna_ssl + + idna_ssl.patch_match_hostname() + + def all_tasks( + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> Set["asyncio.Task[Any]"]: + tasks = list(asyncio.Task.all_tasks(loop)) + return {t for t in tasks if not t.done()} + +else: + all_tasks = asyncio.all_tasks + + +_T = TypeVar("_T") +_S = TypeVar("_S") + + +sentinel: Any = object() +NO_EXTENSIONS: bool = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) + +# N.B. 
sys.flags.dev_mode is available on Python 3.7+, use getattr +# for compatibility with older versions +DEBUG: bool = getattr(sys.flags, "dev_mode", False) or ( + not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG")) +) + + +CHAR = {chr(i) for i in range(0, 128)} +CTL = {chr(i) for i in range(0, 32)} | { + chr(127), +} +SEPARATORS = { + "(", + ")", + "<", + ">", + "@", + ",", + ";", + ":", + "\\", + '"', + "/", + "[", + "]", + "?", + "=", + "{", + "}", + " ", + chr(9), +} +TOKEN = CHAR ^ CTL ^ SEPARATORS + + +class noop: + def __await__(self) -> Generator[None, None, None]: + yield + + +class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])): + """Http basic authentication helper.""" + + def __new__( + cls, login: str, password: str = "", encoding: str = "latin1" + ) -> "BasicAuth": + if login is None: + raise ValueError("None is not allowed as login value") + + if password is None: + raise ValueError("None is not allowed as password value") + + if ":" in login: + raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)') + + return super().__new__(cls, login, password, encoding) + + @classmethod + def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth": + """Create a BasicAuth object from an Authorization HTTP header.""" + try: + auth_type, encoded_credentials = auth_header.split(" ", 1) + except ValueError: + raise ValueError("Could not parse authorization header.") + + if auth_type.lower() != "basic": + raise ValueError("Unknown authorization method %s" % auth_type) + + try: + decoded = base64.b64decode( + encoded_credentials.encode("ascii"), validate=True + ).decode(encoding) + except binascii.Error: + raise ValueError("Invalid base64 encoding.") + + try: + # RFC 2617 HTTP Authentication + # https://www.ietf.org/rfc/rfc2617.txt + # the colon must be present, but the username and password may be + # otherwise blank. + username, password = decoded.split(":", 1) + except ValueError: + raise ValueError("Invalid credentials.") + + return cls(username, password, encoding=encoding) + + @classmethod + def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]: + """Create BasicAuth from url.""" + if not isinstance(url, URL): + raise TypeError("url should be yarl.URL instance") + if url.user is None: + return None + return cls(url.user, url.password or "", encoding=encoding) + + def encode(self) -> str: + """Encode credentials.""" + creds = (f"{self.login}:{self.password}").encode(self.encoding) + return "Basic %s" % base64.b64encode(creds).decode(self.encoding) + + +def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + auth = BasicAuth.from_url(url) + if auth is None: + return url, None + else: + return url.with_user(None), auth + + +def netrc_from_env() -> Optional[netrc.netrc]: + """Load netrc from file. + + Attempt to load it from the path specified by the env-var + NETRC or in the default location in the user's home directory. + + Returns None if it couldn't be found or fails to parse. 
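    For example (an editor's sketch; the path is hypothetical)::

        os.environ["NETRC"] = "/tmp/test_netrc"
        netrc_obj = netrc_from_env()  # parses that file, or returns None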
+ """ + netrc_env = os.environ.get("NETRC") + + if netrc_env is not None: + netrc_path = Path(netrc_env) + else: + try: + home_dir = Path.home() + except RuntimeError as e: # pragma: no cover + # if pathlib can't resolve home, it may raise a RuntimeError + client_logger.debug( + "Could not resolve home directory when " + "trying to look for .netrc file: %s", + e, + ) + return None + + netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc") + + try: + return netrc.netrc(str(netrc_path)) + except netrc.NetrcParseError as e: + client_logger.warning("Could not parse .netrc file: %s", e) + except OSError as e: + # we couldn't read the file (doesn't exist, permissions, etc.) + if netrc_env or netrc_path.is_file(): + # only warn if the environment wanted us to load it, + # or it appears like the default file does actually exist + client_logger.warning("Could not read .netrc file: %s", e) + + return None + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ProxyInfo: + proxy: URL + proxy_auth: Optional[BasicAuth] + + +def proxies_from_env() -> Dict[str, ProxyInfo]: + proxy_urls = { + k: URL(v) + for k, v in getproxies().items() + if k in ("http", "https", "ws", "wss") + } + netrc_obj = netrc_from_env() + stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()} + ret = {} + for proto, val in stripped.items(): + proxy, auth = val + if proxy.scheme in ("https", "wss"): + client_logger.warning( + "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy + ) + continue + if netrc_obj and auth is None: + auth_from_netrc = None + if proxy.host is not None: + auth_from_netrc = netrc_obj.authenticators(proxy.host) + if auth_from_netrc is not None: + # auth_from_netrc is a (`user`, `account`, `password`) tuple, + # `user` and `account` both can be username, + # if `user` is None, use `account` + *logins, password = auth_from_netrc + login = logins[0] if logins[0] else logins[-1] + auth = BasicAuth(cast(str, login), cast(str, password)) + ret[proto] = ProxyInfo(proxy, auth) + return ret + + +def current_task( + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> "Optional[asyncio.Task[Any]]": + if sys.version_info >= (3, 7): + return asyncio.current_task(loop=loop) + else: + return asyncio.Task.current_task(loop=loop) + + +def get_running_loop( + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> asyncio.AbstractEventLoop: + if loop is None: + loop = asyncio.get_event_loop() + if not loop.is_running(): + warnings.warn( + "The object should be created within an async function", + DeprecationWarning, + stacklevel=3, + ) + if loop.get_debug(): + internal_logger.warning( + "The object should be created within an async function", stack_info=True + ) + return loop + + +def isasyncgenfunction(obj: Any) -> bool: + func = getattr(inspect, "isasyncgenfunction", None) + if func is not None: + return func(obj) # type: ignore[no-any-return] + else: + return False + + +def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + """Get a permitted proxy for the given URL from the env.""" + if url.host is not None and proxy_bypass(url.host): + raise LookupError(f"Proxying is disallowed for `{url.host!r}`") + + proxies_in_env = proxies_from_env() + try: + proxy_info = proxies_in_env[url.scheme] + except KeyError: + raise LookupError(f"No proxies found for `{url!s}` in the env") + else: + return proxy_info.proxy, proxy_info.proxy_auth + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class MimeType: + type: str + subtype: str + suffix: str + 
parameters: "MultiDictProxy[str]" + + +@functools.lru_cache(maxsize=56) +def parse_mimetype(mimetype: str) -> MimeType: + """Parses a MIME type into its components. + + mimetype is a MIME type string. + + Returns a MimeType object. + + Example: + + >>> parse_mimetype('text/html; charset=utf-8') + MimeType(type='text', subtype='html', suffix='', + parameters={'charset': 'utf-8'}) + + """ + if not mimetype: + return MimeType( + type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict()) + ) + + parts = mimetype.split(";") + params: MultiDict[str] = MultiDict() + for item in parts[1:]: + if not item: + continue + key, value = cast( + Tuple[str, str], item.split("=", 1) if "=" in item else (item, "") + ) + params.add(key.lower().strip(), value.strip(' "')) + + fulltype = parts[0].strip().lower() + if fulltype == "*": + fulltype = "*/*" + + mtype, stype = ( + cast(Tuple[str, str], fulltype.split("/", 1)) + if "/" in fulltype + else (fulltype, "") + ) + stype, suffix = ( + cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "") + ) + + return MimeType( + type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params) + ) + + +def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]: + name = getattr(obj, "name", None) + if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">": + return Path(name).name + return default + + +not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]") +QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"} + + +def quoted_string(content: str) -> str: + """Return 7-bit content as quoted-string. + + Format content into a quoted-string as defined in RFC5322 for + Internet Message Format. Notice that this is not the 8-bit HTTP + format, but the 7-bit email format. Content must be in usascii or + a ValueError is raised. + """ + if not (QCONTENT > set(content)): + raise ValueError(f"bad content for quoted-string {content!r}") + return not_qtext_re.sub(lambda x: "\\" + x.group(0), content) + + +def content_disposition_header( + disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str +) -> str: + """Sets ``Content-Disposition`` header for MIME. + + This is the MIME payload Content-Disposition header from RFC 2183 + and RFC 7579 section 4.2, not the HTTP Content-Disposition from + RFC 6266. + + disptype is a disposition type: inline, attachment, form-data. + Should be valid extension token (see RFC 2183) + + quote_fields performs value quoting to 7-bit MIME headers + according to RFC 7578. Set to quote_fields to False if recipient + can take 8-bit file names and field values. + + _charset specifies the charset to use when quote_fields is True. + + params is a dict with disposition params. 
+ """ + if not disptype or not (TOKEN > set(disptype)): + raise ValueError("bad content disposition type {!r}" "".format(disptype)) + + value = disptype + if params: + lparams = [] + for key, val in params.items(): + if not key or not (TOKEN > set(key)): + raise ValueError( + "bad content disposition parameter" " {!r}={!r}".format(key, val) + ) + if quote_fields: + if key.lower() == "filename": + qval = quote(val, "", encoding=_charset) + lparams.append((key, '"%s"' % qval)) + else: + try: + qval = quoted_string(val) + except ValueError: + qval = "".join( + (_charset, "''", quote(val, "", encoding=_charset)) + ) + lparams.append((key + "*", qval)) + else: + lparams.append((key, '"%s"' % qval)) + else: + qval = val.replace("\\", "\\\\").replace('"', '\\"') + lparams.append((key, '"%s"' % qval)) + sparams = "; ".join("=".join(pair) for pair in lparams) + value = "; ".join((value, sparams)) + return value + + +class _TSelf(Protocol, Generic[_T]): + _cache: Dict[str, _T] + + +class reify(Generic[_T]): + """Use as a class method decorator. + + It operates almost exactly like + the Python `@property` decorator, but it puts the result of the + method it decorates into the instance dict after the first call, + effectively replacing the function it decorates with an instance + variable. It is, in Python parlance, a data descriptor. + """ + + def __init__(self, wrapped: Callable[..., _T]) -> None: + self.wrapped = wrapped + self.__doc__ = wrapped.__doc__ + self.name = wrapped.__name__ + + def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T: + try: + try: + return inst._cache[self.name] + except KeyError: + val = self.wrapped(inst) + inst._cache[self.name] = val + return val + except AttributeError: + if inst is None: + return self + raise + + def __set__(self, inst: _TSelf[_T], value: _T) -> None: + raise AttributeError("reified property is read-only") + + +reify_py = reify + +try: + from ._helpers import reify as reify_c + + if not NO_EXTENSIONS: + reify = reify_c # type: ignore[misc,assignment] +except ImportError: + pass + +_ipv4_pattern = ( + r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}" + r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" +) +_ipv6_pattern = ( + r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}" + r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)" + r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})" + r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}" + r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}" + r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)" + r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}" + r":|:(:[A-F0-9]{1,4}){7})$" +) +_ipv4_regex = re.compile(_ipv4_pattern) +_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) +_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii")) +_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE) + + +def _is_ip_address( + regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]] +) -> bool: + if host is None: + return False + if isinstance(host, str): + return bool(regex.match(host)) + elif isinstance(host, (bytes, bytearray, memoryview)): + return bool(regexb.match(host)) + else: + raise TypeError(f"{host} [{type(host)}] is not a str or bytes") + + +is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb) +is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb) + + +def is_ip_address(host: Optional[Union[str, bytes, 
bytearray, memoryview]]) -> bool: + return is_ipv4_address(host) or is_ipv6_address(host) + + +def next_whole_second() -> datetime.datetime: + """Return current time rounded up to the next whole second.""" + return datetime.datetime.now(datetime.timezone.utc).replace( + microsecond=0 + ) + datetime.timedelta(seconds=0) + + +_cached_current_datetime: Optional[int] = None +_cached_formatted_datetime = "" + + +def rfc822_formatted_time() -> str: + global _cached_current_datetime + global _cached_formatted_datetime + + now = int(time.time()) + if now != _cached_current_datetime: + # Weekday and month names for HTTP date/time formatting; + # always English! + # Tuples are constants stored in codeobject! + _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") + _monthname = ( + "", # Dummy so we can use 1-based month numbers + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + ) + + year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now) + _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + _weekdayname[wd], + day, + _monthname[month], + year, + hh, + mm, + ss, + ) + _cached_current_datetime = now + return _cached_formatted_datetime + + +def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None: + ref, name = info + ob = ref() + if ob is not None: + with suppress(Exception): + getattr(ob, name)() + + +def weakref_handle( + ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop +) -> Optional[asyncio.TimerHandle]: + if timeout is not None and timeout > 0: + when = loop.time() + timeout + if timeout >= 5: + when = ceil(when) + + return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name)) + return None + + +def call_later( + cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop +) -> Optional[asyncio.TimerHandle]: + if timeout is not None and timeout > 0: + when = loop.time() + timeout + if timeout > 5: + when = ceil(when) + return loop.call_at(when, cb) + return None + + +class TimeoutHandle: + """Timeout handle""" + + def __init__( + self, loop: asyncio.AbstractEventLoop, timeout: Optional[float] + ) -> None: + self._timeout = timeout + self._loop = loop + self._callbacks: List[ + Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]] + ] = [] + + def register( + self, callback: Callable[..., None], *args: Any, **kwargs: Any + ) -> None: + self._callbacks.append((callback, args, kwargs)) + + def close(self) -> None: + self._callbacks.clear() + + def start(self) -> Optional[asyncio.Handle]: + timeout = self._timeout + if timeout is not None and timeout > 0: + when = self._loop.time() + timeout + if timeout >= 5: + when = ceil(when) + return self._loop.call_at(when, self.__call__) + else: + return None + + def timer(self) -> "BaseTimerContext": + if self._timeout is not None and self._timeout > 0: + timer = TimerContext(self._loop) + self.register(timer.timeout) + return timer + else: + return TimerNoop() + + def __call__(self) -> None: + for cb, args, kwargs in self._callbacks: + with suppress(Exception): + cb(*args, **kwargs) + + self._callbacks.clear() + + +class BaseTimerContext(ContextManager["BaseTimerContext"]): + pass + + +class TimerNoop(BaseTimerContext): + def __enter__(self) -> BaseTimerContext: + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + return + + +class TimerContext(BaseTimerContext): + """Low resolution timeout 
context manager""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._tasks: List[asyncio.Task[Any]] = [] + self._cancelled = False + + def __enter__(self) -> BaseTimerContext: + task = current_task(loop=self._loop) + + if task is None: + raise RuntimeError( + "Timeout context manager should be used " "inside a task" + ) + + if self._cancelled: + raise asyncio.TimeoutError from None + + self._tasks.append(task) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + if self._tasks: + self._tasks.pop() + + if exc_type is asyncio.CancelledError and self._cancelled: + raise asyncio.TimeoutError from None + return None + + def timeout(self) -> None: + if not self._cancelled: + for task in set(self._tasks): + task.cancel() + + self._cancelled = True + + +def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout: + if delay is None or delay <= 0: + return async_timeout.timeout(None) + + loop = get_running_loop() + now = loop.time() + when = now + delay + if delay > 5: + when = ceil(when) + return async_timeout.timeout_at(when) + + +class HeadersMixin: + + ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"]) + + _content_type: Optional[str] = None + _content_dict: Optional[Dict[str, str]] = None + _stored_content_type = sentinel + + def _parse_content_type(self, raw: str) -> None: + self._stored_content_type = raw + if raw is None: + # default value according to RFC 2616 + self._content_type = "application/octet-stream" + self._content_dict = {} + else: + msg = HeaderParser().parsestr("Content-Type: " + raw) + self._content_type = msg.get_content_type() + params = msg.get_params() + self._content_dict = dict(params[1:]) # First element is content type again + + @property + def content_type(self) -> str: + """The value of content part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined] + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_type # type: ignore[return-value] + + @property + def charset(self) -> Optional[str]: + """The value of charset part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined] + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_dict.get("charset") # type: ignore[union-attr] + + @property + def content_length(self) -> Optional[int]: + """The value of Content-Length HTTP header.""" + content_length = self._headers.get( # type: ignore[attr-defined] + hdrs.CONTENT_LENGTH + ) + + if content_length is not None: + return int(content_length) + else: + return None + + +def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: + if not fut.done(): + fut.set_result(result) + + +def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None: + if not fut.done(): + fut.set_exception(exc) + + +class ChainMapProxy(Mapping[str, Any]): + __slots__ = ("_maps",) + + def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None: + self._maps = tuple(maps) + + def __init_subclass__(cls) -> None: + raise TypeError( + "Inheritance class {} from ChainMapProxy " + "is forbidden".format(cls.__name__) + ) + + def __getitem__(self, key: str) -> Any: + for mapping in self._maps: + try: + return mapping[key] + except KeyError: + pass + raise KeyError(key) + + def get(self, key: str, default: Any = 
None) -> Any: + return self[key] if key in self else default + + def __len__(self) -> int: + # reuses stored hash values if possible + return len(set().union(*self._maps)) # type: ignore[arg-type] + + def __iter__(self) -> Iterator[str]: + d: Dict[str, Any] = {} + for mapping in reversed(self._maps): + # reuses stored hash values if possible + d.update(mapping) + return iter(d) + + def __contains__(self, key: object) -> bool: + return any(key in m for m in self._maps) + + def __bool__(self) -> bool: + return any(self._maps) + + def __repr__(self) -> str: + content = ", ".join(map(repr, self._maps)) + return f"ChainMapProxy({content})" + + +# https://tools.ietf.org/html/rfc7232#section-2.3 +_ETAGC = r"[!#-}\x80-\xff]+" +_ETAGC_RE = re.compile(_ETAGC) +_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"' +QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG) +LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)") + +ETAG_ANY = "*" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ETag: + value: str + is_weak: bool = False + + +def validate_etag_value(value: str) -> None: + if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value): + raise ValueError( + f"Value {value!r} is not a valid etag. Maybe it contains '\"'?" + ) + + +def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: + """Process a date string, return a datetime object""" + if date_str is not None: + timetuple = parsedate(date_str) + if timetuple is not None: + with suppress(ValueError): + return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) + return None diff --git a/venv/lib/python3.10/site-packages/aiohttp/http.py b/venv/lib/python3.10/site-packages/aiohttp/http.py new file mode 100644 index 0000000..ca9dc54 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/http.py @@ -0,0 +1,70 @@ +import http.server +import sys +from typing import Mapping, Tuple + +from . 
import __version__ +from .http_exceptions import HttpProcessingError as HttpProcessingError +from .http_parser import ( + HeadersParser as HeadersParser, + HttpParser as HttpParser, + HttpRequestParser as HttpRequestParser, + HttpResponseParser as HttpResponseParser, + RawRequestMessage as RawRequestMessage, + RawResponseMessage as RawResponseMessage, +) +from .http_websocket import ( + WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE, + WS_KEY as WS_KEY, + WebSocketError as WebSocketError, + WebSocketReader as WebSocketReader, + WebSocketWriter as WebSocketWriter, + WSCloseCode as WSCloseCode, + WSMessage as WSMessage, + WSMsgType as WSMsgType, + ws_ext_gen as ws_ext_gen, + ws_ext_parse as ws_ext_parse, +) +from .http_writer import ( + HttpVersion as HttpVersion, + HttpVersion10 as HttpVersion10, + HttpVersion11 as HttpVersion11, + StreamWriter as StreamWriter, +) + +__all__ = ( + "HttpProcessingError", + "RESPONSES", + "SERVER_SOFTWARE", + # .http_writer + "StreamWriter", + "HttpVersion", + "HttpVersion10", + "HttpVersion11", + # .http_parser + "HeadersParser", + "HttpParser", + "HttpRequestParser", + "HttpResponseParser", + "RawRequestMessage", + "RawResponseMessage", + # .http_websocket + "WS_CLOSED_MESSAGE", + "WS_CLOSING_MESSAGE", + "WS_KEY", + "WebSocketReader", + "WebSocketWriter", + "ws_ext_gen", + "ws_ext_parse", + "WSMessage", + "WebSocketError", + "WSMsgType", + "WSCloseCode", +) + + +SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format( + sys.version_info, __version__ +) + +RESPONSES: Mapping[int, Tuple[str, str]] = http.server.BaseHTTPRequestHandler.responses diff --git a/venv/lib/python3.10/site-packages/aiohttp/http_exceptions.py b/venv/lib/python3.10/site-packages/aiohttp/http_exceptions.py new file mode 100644 index 0000000..c885f80 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/http_exceptions.py @@ -0,0 +1,105 @@ +"""Low-level http related exceptions.""" + + +from typing import Optional, Union + +from .typedefs import _CIMultiDict + +__all__ = ("HttpProcessingError",) + + +class HttpProcessingError(Exception): + """HTTP error. + + Shortcut for raising HTTP errors with custom code, message and headers. + + code: HTTP Error code. + message: (optional) Error message. 
+ headers: (optional) Headers to be sent in response, a list of pairs + """ + + code = 0 + message = "" + headers = None + + def __init__( + self, + *, + code: Optional[int] = None, + message: str = "", + headers: Optional[_CIMultiDict] = None, + ) -> None: + if code is not None: + self.code = code + self.headers = headers + self.message = message + + def __str__(self) -> str: + return f"{self.code}, message={self.message!r}" + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}: {self}>" + + +class BadHttpMessage(HttpProcessingError): + + code = 400 + message = "Bad Request" + + def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None: + super().__init__(message=message, headers=headers) + self.args = (message,) + + +class HttpBadRequest(BadHttpMessage): + + code = 400 + message = "Bad Request" + + +class PayloadEncodingError(BadHttpMessage): + """Base class for payload errors""" + + +class ContentEncodingError(PayloadEncodingError): + """Content encoding error.""" + + +class TransferEncodingError(PayloadEncodingError): + """transfer encoding error.""" + + +class ContentLengthError(PayloadEncodingError): + """Not enough data for satisfy content length header.""" + + +class LineTooLong(BadHttpMessage): + def __init__( + self, line: str, limit: str = "Unknown", actual_size: str = "Unknown" + ) -> None: + super().__init__( + f"Got more than {limit} bytes ({actual_size}) when reading {line}." + ) + self.args = (line, limit, actual_size) + + +class InvalidHeader(BadHttpMessage): + def __init__(self, hdr: Union[bytes, str]) -> None: + if isinstance(hdr, bytes): + hdr = hdr.decode("utf-8", "surrogateescape") + super().__init__(f"Invalid HTTP Header: {hdr}") + self.hdr = hdr + self.args = (hdr,) + + +class BadStatusLine(BadHttpMessage): + def __init__(self, line: str = "") -> None: + if not isinstance(line, str): + line = repr(line) + super().__init__(f"Bad status line {line!r}") + self.args = (line,) + self.line = line + + +class InvalidURLError(BadHttpMessage): + pass diff --git a/venv/lib/python3.10/site-packages/aiohttp/http_parser.py b/venv/lib/python3.10/site-packages/aiohttp/http_parser.py new file mode 100644 index 0000000..5a66ce4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/http_parser.py @@ -0,0 +1,969 @@ +import abc +import asyncio +import collections +import re +import string +import zlib +from contextlib import suppress +from enum import IntEnum +from typing import ( + Any, + Generic, + List, + NamedTuple, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +from multidict import CIMultiDict, CIMultiDictProxy, istr +from yarl import URL + +from . 
import hdrs +from .base_protocol import BaseProtocol +from .helpers import NO_EXTENSIONS, BaseTimerContext +from .http_exceptions import ( + BadHttpMessage, + BadStatusLine, + ContentEncodingError, + ContentLengthError, + InvalidHeader, + LineTooLong, + TransferEncodingError, +) +from .http_writer import HttpVersion, HttpVersion10 +from .log import internal_logger +from .streams import EMPTY_PAYLOAD, StreamReader +from .typedefs import Final, RawHeaders + +try: + import brotli + + HAS_BROTLI = True +except ImportError: # pragma: no cover + HAS_BROTLI = False + + +__all__ = ( + "HeadersParser", + "HttpParser", + "HttpRequestParser", + "HttpResponseParser", + "RawRequestMessage", + "RawResponseMessage", +) + +ASCIISET: Final[Set[str]] = set(string.printable) + +# See https://tools.ietf.org/html/rfc7230#section-3.1.1 +# and https://tools.ietf.org/html/rfc7230#appendix-B +# +# method = token +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." / +# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA +# token = 1*tchar +METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+") +VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+).(\d+)") +HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]") + + +class RawRequestMessage(NamedTuple): + method: str + path: str + version: HttpVersion + headers: "CIMultiDictProxy[str]" + raw_headers: RawHeaders + should_close: bool + compression: Optional[str] + upgrade: bool + chunked: bool + url: URL + + +RawResponseMessage = collections.namedtuple( + "RawResponseMessage", + [ + "version", + "code", + "reason", + "headers", + "raw_headers", + "should_close", + "compression", + "upgrade", + "chunked", + ], +) + + +_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage) + + +class ParseState(IntEnum): + + PARSE_NONE = 0 + PARSE_LENGTH = 1 + PARSE_CHUNKED = 2 + PARSE_UNTIL_EOF = 3 + + +class ChunkState(IntEnum): + PARSE_CHUNKED_SIZE = 0 + PARSE_CHUNKED_CHUNK = 1 + PARSE_CHUNKED_CHUNK_EOF = 2 + PARSE_MAYBE_TRAILERS = 3 + PARSE_TRAILERS = 4 + + +class HeadersParser: + def __init__( + self, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + ) -> None: + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + + def parse_headers( + self, lines: List[bytes] + ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]: + headers: CIMultiDict[str] = CIMultiDict() + raw_headers = [] + + lines_idx = 1 + line = lines[1] + line_count = len(lines) + + while line: + # Parse initial header name : value pair. 
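# Editor's illustration of the folding handled below (not vendored code):
# given lines = [b"GET / HTTP/1.1", b"Host: a", b"X-Note: one", b"\ttwo", b""],
# the tab-led line is a continuation, so the loop (which starts at lines[1],
# skipping the request line) yields the pairs
# ("Host", "a") and ("X-Note", "one\ttwo").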
+ try: + bname, bvalue = line.split(b":", 1) + except ValueError: + raise InvalidHeader(line) from None + + bname = bname.strip(b" \t") + bvalue = bvalue.lstrip() + if HDRRE.search(bname): + raise InvalidHeader(bname) + if len(bname) > self.max_field_size: + raise LineTooLong( + "request header name {}".format( + bname.decode("utf8", "xmlcharrefreplace") + ), + str(self.max_field_size), + str(len(bname)), + ) + + header_length = len(bvalue) + + # next line + lines_idx += 1 + line = lines[lines_idx] + + # consume continuation lines + continuation = line and line[0] in (32, 9) # (' ', '\t') + + if continuation: + bvalue_lst = [bvalue] + while continuation: + header_length += len(line) + if header_length > self.max_field_size: + raise LineTooLong( + "request header field {}".format( + bname.decode("utf8", "xmlcharrefreplace") + ), + str(self.max_field_size), + str(header_length), + ) + bvalue_lst.append(line) + + # next line + lines_idx += 1 + if lines_idx < line_count: + line = lines[lines_idx] + if line: + continuation = line[0] in (32, 9) # (' ', '\t') + else: + line = b"" + break + bvalue = b"".join(bvalue_lst) + else: + if header_length > self.max_field_size: + raise LineTooLong( + "request header field {}".format( + bname.decode("utf8", "xmlcharrefreplace") + ), + str(self.max_field_size), + str(header_length), + ) + + bvalue = bvalue.strip() + name = bname.decode("utf-8", "surrogateescape") + value = bvalue.decode("utf-8", "surrogateescape") + + headers.add(name, value) + raw_headers.append((bname, bvalue)) + + return (CIMultiDictProxy(headers), tuple(raw_headers)) + + +class HttpParser(abc.ABC, Generic[_MsgT]): + def __init__( + self, + protocol: Optional[BaseProtocol] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + limit: int = 2**16, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + timer: Optional[BaseTimerContext] = None, + code: Optional[int] = None, + method: Optional[str] = None, + readall: bool = False, + payload_exception: Optional[Type[BaseException]] = None, + response_with_body: bool = True, + read_until_eof: bool = False, + auto_decompress: bool = True, + ) -> None: + self.protocol = protocol + self.loop = loop + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + self.timer = timer + self.code = code + self.method = method + self.readall = readall + self.payload_exception = payload_exception + self.response_with_body = response_with_body + self.read_until_eof = read_until_eof + + self._lines: List[bytes] = [] + self._tail = b"" + self._upgraded = False + self._payload = None + self._payload_parser: Optional[HttpPayloadParser] = None + self._auto_decompress = auto_decompress + self._limit = limit + self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size) + + @abc.abstractmethod + def parse_message(self, lines: List[bytes]) -> _MsgT: + pass + + def feed_eof(self) -> Optional[_MsgT]: + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + else: + # try to extract partial message + if self._tail: + self._lines.append(self._tail) + + if self._lines: + if self._lines[-1] != "\r\n": + self._lines.append(b"") + with suppress(Exception): + return self.parse_message(self._lines) + return None + + def feed_data( + self, + data: bytes, + SEP: bytes = b"\r\n", + EMPTY: bytes = b"", + CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, + METH_CONNECT: str = hdrs.METH_CONNECT, + SEC_WEBSOCKET_KEY1: istr = 
hdrs.SEC_WEBSOCKET_KEY1, + ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]: + + messages = [] + + if self._tail: + data, self._tail = self._tail + data, b"" + + data_len = len(data) + start_pos = 0 + loop = self.loop + + while start_pos < data_len: + + # read HTTP message (request/response line + headers), \r\n\r\n + # and split by lines + if self._payload_parser is None and not self._upgraded: + pos = data.find(SEP, start_pos) + # consume \r\n + if pos == start_pos and not self._lines: + start_pos = pos + 2 + continue + + if pos >= start_pos: + # line found + self._lines.append(data[start_pos:pos]) + start_pos = pos + 2 + + # \r\n\r\n found + if self._lines[-1] == EMPTY: + try: + msg: _MsgT = self.parse_message(self._lines) + finally: + self._lines.clear() + + def get_content_length() -> Optional[int]: + # payload length + length_hdr = msg.headers.get(CONTENT_LENGTH) + if length_hdr is None: + return None + + try: + length = int(length_hdr) + except ValueError: + raise InvalidHeader(CONTENT_LENGTH) + + if length < 0: + raise InvalidHeader(CONTENT_LENGTH) + + return length + + length = get_content_length() + # do not support old websocket spec + if SEC_WEBSOCKET_KEY1 in msg.headers: + raise InvalidHeader(SEC_WEBSOCKET_KEY1) + + self._upgraded = msg.upgrade + + method = getattr(msg, "method", self.method) + + assert self.protocol is not None + # calculate payload + if ( + (length is not None and length > 0) + or msg.chunked + and not msg.upgrade + ): + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + payload_parser = HttpPayloadParser( + payload, + length=length, + chunked=msg.chunked, + method=method, + compression=msg.compression, + code=self.code, + readall=self.readall, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress, + ) + if not payload_parser.done: + self._payload_parser = payload_parser + elif method == METH_CONNECT: + assert isinstance(msg, RawRequestMessage) + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + self._upgraded = True + self._payload_parser = HttpPayloadParser( + payload, + method=msg.method, + compression=msg.compression, + readall=True, + auto_decompress=self._auto_decompress, + ) + else: + if ( + getattr(msg, "code", 100) >= 199 + and length is None + and self.read_until_eof + ): + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + payload_parser = HttpPayloadParser( + payload, + length=length, + chunked=msg.chunked, + method=method, + compression=msg.compression, + code=self.code, + readall=True, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress, + ) + if not payload_parser.done: + self._payload_parser = payload_parser + else: + payload = EMPTY_PAYLOAD + + messages.append((msg, payload)) + else: + self._tail = data[start_pos:] + data = EMPTY + break + + # no parser, just store + elif self._payload_parser is None and self._upgraded: + assert not self._lines + break + + # feed payload + elif data and start_pos < data_len: + assert not self._lines + assert self._payload_parser is not None + try: + eof, data = self._payload_parser.feed_data(data[start_pos:]) + except BaseException as exc: + if self.payload_exception is not None: + self._payload_parser.payload.set_exception( + self.payload_exception(str(exc)) + ) + else: + self._payload_parser.payload.set_exception(exc) + + eof = True + data = b"" + + if eof: + start_pos = 0 + data_len 
= len(data) + self._payload_parser = None + continue + else: + break + + if data and start_pos < data_len: + data = data[start_pos:] + else: + data = EMPTY + + return messages, self._upgraded, data + + def parse_headers( + self, lines: List[bytes] + ) -> Tuple[ + "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool + ]: + """Parses RFC 5322 headers from a stream. + + Line continuations are supported. Returns list of header name + and value pairs. Header name is in upper case. + """ + headers, raw_headers = self._headers_parser.parse_headers(lines) + close_conn = None + encoding = None + upgrade = False + chunked = False + + # keep-alive + conn = headers.get(hdrs.CONNECTION) + if conn: + v = conn.lower() + if v == "close": + close_conn = True + elif v == "keep-alive": + close_conn = False + elif v == "upgrade": + upgrade = True + + # encoding + enc = headers.get(hdrs.CONTENT_ENCODING) + if enc: + enc = enc.lower() + if enc in ("gzip", "deflate", "br"): + encoding = enc + + # chunking + te = headers.get(hdrs.TRANSFER_ENCODING) + if te is not None: + if "chunked" == te.lower(): + chunked = True + else: + raise BadHttpMessage("Request has invalid `Transfer-Encoding`") + + if hdrs.CONTENT_LENGTH in headers: + raise BadHttpMessage( + "Content-Length can't be present with Transfer-Encoding", + ) + + return (headers, raw_headers, close_conn, encoding, upgrade, chunked) + + def set_upgraded(self, val: bool) -> None: + """Set connection upgraded (to websocket) mode. + + :param bool val: new state. + """ + self._upgraded = val + + +class HttpRequestParser(HttpParser[RawRequestMessage]): + """Read request status line. + + Exception .http_exceptions.BadStatusLine + could be raised in case of any errors in status line. + Returns RawRequestMessage. 
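    A sketch of the request-line split performed below (editor's note)::

        method, path, version = "GET /p?q=1 HTTP/1.1".split(None, 2)
        # method == "GET", path == "/p?q=1", version == "HTTP/1.1";
        # an origin-form path is then rebuilt via yarl.URL.build()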
+ """ + + def parse_message(self, lines: List[bytes]) -> RawRequestMessage: + # request line + line = lines[0].decode("utf-8", "surrogateescape") + try: + method, path, version = line.split(None, 2) + except ValueError: + raise BadStatusLine(line) from None + + if len(path) > self.max_line_size: + raise LineTooLong( + "Status line is too long", str(self.max_line_size), str(len(path)) + ) + + # method + if not METHRE.match(method): + raise BadStatusLine(method) + + # version + try: + if version.startswith("HTTP/"): + n1, n2 = version[5:].split(".", 1) + version_o = HttpVersion(int(n1), int(n2)) + else: + raise BadStatusLine(version) + except Exception: + raise BadStatusLine(version) + + if method == "CONNECT": + # authority-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 + url = URL.build(authority=path, encoded=True) + elif path.startswith("/"): + # origin-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 + path_part, _hash_separator, url_fragment = path.partition("#") + path_part, _question_mark_separator, qs_part = path_part.partition("?") + + # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based + # NOTE: parser does, otherwise it results into the same + # NOTE: HTTP Request-Line input producing different + # NOTE: `yarl.URL()` objects + url = URL.build( + path=path_part, + query_string=qs_part, + fragment=url_fragment, + encoded=True, + ) + else: + # absolute-form for proxy maybe, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 + url = URL(path, encoded=True) + + # read headers + ( + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) = self.parse_headers(lines) + + if close is None: # then the headers weren't set in the request + if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close + close = True + else: # HTTP 1.1 must ask to close. + close = False + + return RawRequestMessage( + method, + path, + version_o, + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + url, + ) + + +class HttpResponseParser(HttpParser[RawResponseMessage]): + """Read response status line and headers. + + BadStatusLine could be raised in case of any errors in status line. + Returns RawResponseMessage. 
+ """ + + def parse_message(self, lines: List[bytes]) -> RawResponseMessage: + line = lines[0].decode("utf-8", "surrogateescape") + try: + version, status = line.split(None, 1) + except ValueError: + raise BadStatusLine(line) from None + + try: + status, reason = status.split(None, 1) + except ValueError: + reason = "" + + if len(reason) > self.max_line_size: + raise LineTooLong( + "Status line is too long", str(self.max_line_size), str(len(reason)) + ) + + # version + match = VERSRE.match(version) + if match is None: + raise BadStatusLine(line) + version_o = HttpVersion(int(match.group(1)), int(match.group(2))) + + # The status code is a three-digit number + try: + status_i = int(status) + except ValueError: + raise BadStatusLine(line) from None + + if status_i > 999: + raise BadStatusLine(line) + + # read headers + ( + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) = self.parse_headers(lines) + + if close is None: + close = version_o <= HttpVersion10 + + return RawResponseMessage( + version_o, + status_i, + reason.strip(), + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) + + +class HttpPayloadParser: + def __init__( + self, + payload: StreamReader, + length: Optional[int] = None, + chunked: bool = False, + compression: Optional[str] = None, + code: Optional[int] = None, + method: Optional[str] = None, + readall: bool = False, + response_with_body: bool = True, + auto_decompress: bool = True, + ) -> None: + self._length = 0 + self._type = ParseState.PARSE_NONE + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + self._chunk_size = 0 + self._chunk_tail = b"" + self._auto_decompress = auto_decompress + self.done = False + + # payload decompression wrapper + if response_with_body and compression and self._auto_decompress: + real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer( + payload, compression + ) + else: + real_payload = payload + + # payload parser + if not response_with_body: + # don't parse payload if it's not expected to be received + self._type = ParseState.PARSE_NONE + real_payload.feed_eof() + self.done = True + + elif chunked: + self._type = ParseState.PARSE_CHUNKED + elif length is not None: + self._type = ParseState.PARSE_LENGTH + self._length = length + if self._length == 0: + real_payload.feed_eof() + self.done = True + else: + if readall and code != 204: + self._type = ParseState.PARSE_UNTIL_EOF + elif method in ("PUT", "POST"): + internal_logger.warning( # pragma: no cover + "Content-Length or Transfer-Encoding header is required" + ) + self._type = ParseState.PARSE_NONE + real_payload.feed_eof() + self.done = True + + self.payload = real_payload + + def feed_eof(self) -> None: + if self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_eof() + elif self._type == ParseState.PARSE_LENGTH: + raise ContentLengthError( + "Not enough data for satisfy content length header." + ) + elif self._type == ParseState.PARSE_CHUNKED: + raise TransferEncodingError( + "Not enough data for satisfy transfer length header." 
+ ) + + def feed_data( + self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";" + ) -> Tuple[bool, bytes]: + # Read specified amount of bytes + if self._type == ParseState.PARSE_LENGTH: + required = self._length + chunk_len = len(chunk) + + if required >= chunk_len: + self._length = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + if self._length == 0: + self.payload.feed_eof() + return True, b"" + else: + self._length = 0 + self.payload.feed_data(chunk[:required], required) + self.payload.feed_eof() + return True, chunk[required:] + + # Chunked transfer encoding parser + elif self._type == ParseState.PARSE_CHUNKED: + if self._chunk_tail: + chunk = self._chunk_tail + chunk + self._chunk_tail = b"" + + while chunk: + + # read next chunk size + if self._chunk == ChunkState.PARSE_CHUNKED_SIZE: + pos = chunk.find(SEP) + if pos >= 0: + i = chunk.find(CHUNK_EXT, 0, pos) + if i >= 0: + size_b = chunk[:i] # strip chunk-extensions + else: + size_b = chunk[:pos] + + try: + size = int(bytes(size_b), 16) + except ValueError: + exc = TransferEncodingError( + chunk[:pos].decode("ascii", "surrogateescape") + ) + self.payload.set_exception(exc) + raise exc from None + + chunk = chunk[pos + 2 :] + if size == 0: # eof marker + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + else: + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK + self._chunk_size = size + self.payload.begin_http_chunk_receiving() + else: + self._chunk_tail = chunk + return False, b"" + + # read chunk and feed buffer + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK: + required = self._chunk_size + chunk_len = len(chunk) + + if required > chunk_len: + self._chunk_size = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + return False, b"" + else: + self._chunk_size = 0 + self.payload.feed_data(chunk[:required], required) + chunk = chunk[required:] + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF + self.payload.end_http_chunk_receiving() + + # toss the CRLF at the end of the chunk + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: + if chunk[:2] == SEP: + chunk = chunk[2:] + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + else: + self._chunk_tail = chunk + return False, b"" + + # if stream does not contain trailer, after 0\r\n + # we should get another \r\n otherwise + # trailers needs to be skiped until \r\n\r\n + if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS: + head = chunk[:2] + if head == SEP: + # end of stream + self.payload.feed_eof() + return True, chunk[2:] + # Both CR and LF, or only LF may not be received yet. It is + # expected that CRLF or LF will be shown at the very first + # byte next time, otherwise trailers should come. The last + # CRLF which marks the end of response might not be + # contained in the same TCP segment which delivered the + # size indicator. 
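# Editor's illustration (not vendored code): a complete chunked body such as
# b"5\r\nhello\r\n0\r\n\r\n" carries a hex size line before each chunk; the
# bare b"0" chunk ends the data, and the blank line after it is the SEP
# checked for just below.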
+ if not head: + return False, b"" + if head == SEP[:1]: + self._chunk_tail = head + return False, b"" + self._chunk = ChunkState.PARSE_TRAILERS + + # read and discard trailer up to the CRLF terminator + if self._chunk == ChunkState.PARSE_TRAILERS: + pos = chunk.find(SEP) + if pos >= 0: + chunk = chunk[pos + 2 :] + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + else: + self._chunk_tail = chunk + return False, b"" + + # Read all bytes until eof + elif self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_data(chunk, len(chunk)) + + return False, b"" + + +class DeflateBuffer: + """DeflateStream decompress stream and feed data into specified stream.""" + + decompressor: Any + + def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: + self.out = out + self.size = 0 + self.encoding = encoding + self._started_decoding = False + + if encoding == "br": + if not HAS_BROTLI: # pragma: no cover + raise ContentEncodingError( + "Can not decode content-encoding: brotli (br). " + "Please install `Brotli`" + ) + + class BrotliDecoder: + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self) -> None: + self._obj = brotli.Decompressor() + + def decompress(self, data: bytes) -> bytes: + if hasattr(self._obj, "decompress"): + return cast(bytes, self._obj.decompress(data)) + return cast(bytes, self._obj.process(data)) + + def flush(self) -> bytes: + if hasattr(self._obj, "flush"): + return cast(bytes, self._obj.flush()) + return b"" + + self.decompressor = BrotliDecoder() + else: + zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS + self.decompressor = zlib.decompressobj(wbits=zlib_mode) + + def set_exception(self, exc: BaseException) -> None: + self.out.set_exception(exc) + + def feed_data(self, chunk: bytes, size: int) -> None: + if not size: + return + + self.size += size + + # RFC1950 + # bits 0..3 = CM = 0b1000 = 8 = "deflate" + # bits 4..7 = CINFO = 1..7 = windows size. + if ( + not self._started_decoding + and self.encoding == "deflate" + and chunk[0] & 0xF != 8 + ): + # Change the decoder to decompress incorrectly compressed data + # Actually we should issue a warning about non-RFC-compliant data. 
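# Editor's note: an RFC 1950 zlib header byte is e.g. 0x78, whose low nibble
# (0x78 & 0xF == 8) names the deflate method, so wrapped streams pass the
# check above; a raw-deflate body fails it, and the assignment below swaps in
# a decompressor with negative wbits to read the unwrapped stream.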
+ self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS) + + try: + chunk = self.decompressor.decompress(chunk) + except Exception: + raise ContentEncodingError( + "Can not decode content-encoding: %s" % self.encoding + ) + + self._started_decoding = True + + if chunk: + self.out.feed_data(chunk, len(chunk)) + + def feed_eof(self) -> None: + chunk = self.decompressor.flush() + + if chunk or self.size > 0: + self.out.feed_data(chunk, len(chunk)) + if self.encoding == "deflate" and not self.decompressor.eof: + raise ContentEncodingError("deflate") + + self.out.feed_eof() + + def begin_http_chunk_receiving(self) -> None: + self.out.begin_http_chunk_receiving() + + def end_http_chunk_receiving(self) -> None: + self.out.end_http_chunk_receiving() + + +HttpRequestParserPy = HttpRequestParser +HttpResponseParserPy = HttpResponseParser +RawRequestMessagePy = RawRequestMessage +RawResponseMessagePy = RawResponseMessage + +try: + if not NO_EXTENSIONS: + from ._http_parser import ( # type: ignore[import,no-redef] + HttpRequestParser, + HttpResponseParser, + RawRequestMessage, + RawResponseMessage, + ) + + HttpRequestParserC = HttpRequestParser + HttpResponseParserC = HttpResponseParser + RawRequestMessageC = RawRequestMessage + RawResponseMessageC = RawResponseMessage +except ImportError: # pragma: no cover + pass diff --git a/venv/lib/python3.10/site-packages/aiohttp/http_websocket.py b/venv/lib/python3.10/site-packages/aiohttp/http_websocket.py new file mode 100644 index 0000000..2cfc519 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/http_websocket.py @@ -0,0 +1,701 @@ +"""WebSocket protocol versions 13 and 8.""" + +import asyncio +import collections +import json +import random +import re +import sys +import zlib +from enum import IntEnum +from struct import Struct +from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast + +from .base_protocol import BaseProtocol +from .helpers import NO_EXTENSIONS +from .streams import DataQueue +from .typedefs import Final + +__all__ = ( + "WS_CLOSED_MESSAGE", + "WS_CLOSING_MESSAGE", + "WS_KEY", + "WebSocketReader", + "WebSocketWriter", + "WSMessage", + "WebSocketError", + "WSMsgType", + "WSCloseCode", +) + + +class WSCloseCode(IntEnum): + OK = 1000 + GOING_AWAY = 1001 + PROTOCOL_ERROR = 1002 + UNSUPPORTED_DATA = 1003 + ABNORMAL_CLOSURE = 1006 + INVALID_TEXT = 1007 + POLICY_VIOLATION = 1008 + MESSAGE_TOO_BIG = 1009 + MANDATORY_EXTENSION = 1010 + INTERNAL_ERROR = 1011 + SERVICE_RESTART = 1012 + TRY_AGAIN_LATER = 1013 + BAD_GATEWAY = 1014 + + +ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} + + +class WSMsgType(IntEnum): + # websocket spec types + CONTINUATION = 0x0 + TEXT = 0x1 + BINARY = 0x2 + PING = 0x9 + PONG = 0xA + CLOSE = 0x8 + + # aiohttp specific types + CLOSING = 0x100 + CLOSED = 0x101 + ERROR = 0x102 + + text = TEXT + binary = BINARY + ping = PING + pong = PONG + close = CLOSE + closing = CLOSING + closed = CLOSED + error = ERROR + + +WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" + + +UNPACK_LEN2 = Struct("!H").unpack_from +UNPACK_LEN3 = Struct("!Q").unpack_from +UNPACK_CLOSE_CODE = Struct("!H").unpack +PACK_LEN1 = Struct("!BB").pack +PACK_LEN2 = Struct("!BBH").pack +PACK_LEN3 = Struct("!BBQ").pack +PACK_CLOSE_CODE = Struct("!H").pack +MSG_SIZE: Final[int] = 2**14 +DEFAULT_LIMIT: Final[int] = 2**16 + + +_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"]) + + +class WSMessage(_WSMessageBase): + def json(self, *, loads: Callable[[Any], 
Any] = json.loads) -> Any: + """Return parsed JSON data. + + .. versionadded:: 0.22 + """ + return loads(self.data) + + +WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None) +WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None) + + +class WebSocketError(Exception): + """WebSocket protocol parser error.""" + + def __init__(self, code: int, message: str) -> None: + self.code = code + super().__init__(code, message) + + def __str__(self) -> str: + return cast(str, self.args[1]) + + +class WSHandshakeError(Exception): + """WebSocket protocol handshake error.""" + + +native_byteorder: Final[str] = sys.byteorder + + +# Used by _websocket_mask_python +_XOR_TABLE: Final[List[bytes]] = [bytes(a ^ b for a in range(256)) for b in range(256)] + + +def _websocket_mask_python(mask: bytes, data: bytearray) -> None: + """Websocket masking function. + + `mask` is a `bytes` object of length 4; `data` is a `bytearray` + object of any length. The contents of `data` are masked with `mask`, + as specified in section 5.3 of RFC 6455. + + Note that this function mutates the `data` argument. + + This pure-python implementation may be replaced by an optimized + version when available. + + """ + assert isinstance(data, bytearray), data + assert len(mask) == 4, mask + + if data: + a, b, c, d = (_XOR_TABLE[n] for n in mask) + data[::4] = data[::4].translate(a) + data[1::4] = data[1::4].translate(b) + data[2::4] = data[2::4].translate(c) + data[3::4] = data[3::4].translate(d) + + +if NO_EXTENSIONS: # pragma: no cover + _websocket_mask = _websocket_mask_python +else: + try: + from ._websocket import _websocket_mask_cython # type: ignore[import] + + _websocket_mask = _websocket_mask_cython + except ImportError: # pragma: no cover + _websocket_mask = _websocket_mask_python + +_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF]) + + +_WS_EXT_RE: Final[Pattern[str]] = re.compile( + r"^(?:;\s*(?:" + r"(server_no_context_takeover)|" + r"(client_no_context_takeover)|" + r"(server_max_window_bits(?:=(\d+))?)|" + r"(client_max_window_bits(?:=(\d+))?)))*$" +) + +_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?") + + +def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]: + if not extstr: + return 0, False + + compress = 0 + notakeover = False + for ext in _WS_EXT_RE_SPLIT.finditer(extstr): + defext = ext.group(1) + # Return compress = 15 when get `permessage-deflate` + if not defext: + compress = 15 + break + match = _WS_EXT_RE.match(defext) + if match: + compress = 15 + if isserver: + # Server never fail to detect compress handshake. 
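+                # e.g. an offer of
+                #   "permessage-deflate; server_max_window_bits=10"
+                # received by the server yields compress=10 below,
+                # i.e. a 2**10 byte LZ77 window for outgoing data.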
+                # The server does not need to send max_window_bits back
+                # to the client.
+                if match.group(4):
+                    compress = int(match.group(4))
+                    # Group 3 must match if group 4 matches.
+                    # zlib does not support wbits=8; if the offered
+                    # window size is unsupported,
+                    # CONTINUE to the next extension.
+                    if compress > 15 or compress < 9:
+                        compress = 0
+                        continue
+                if match.group(1):
+                    notakeover = True
+                # Ignore regex groups 5 & 6 (client_max_window_bits).
+                break
+            else:
+                if match.group(6):
+                    compress = int(match.group(6))
+                    # Group 5 must match if group 6 matches.
+                    # zlib does not support wbits=8; if the window
+                    # size is unsupported,
+                    # FAIL the parse.
+                    if compress > 15 or compress < 9:
+                        raise WSHandshakeError("Invalid window size")
+                if match.group(2):
+                    notakeover = True
+                # Ignore regex groups 5 & 6 (client_max_window_bits).
+                break
+        # Fail on the client side when the extension does not match.
+        elif not isserver:
+            raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))
+
+    return compress, notakeover
+
+
+def ws_ext_gen(
+    compress: int = 15, isserver: bool = False, server_notakeover: bool = False
+) -> str:
+    # client_notakeover=False is not used for the server;
+    # zlib does not support wbits=8.
+    if compress < 9 or compress > 15:
+        raise ValueError(
+            "Compress wbits must be between 9 and 15, "
+            "zlib does not support wbits=8"
+        )
+    enabledext = ["permessage-deflate"]
+    if not isserver:
+        enabledext.append("client_max_window_bits")
+
+    if compress < 15:
+        enabledext.append("server_max_window_bits=" + str(compress))
+    if server_notakeover:
+        enabledext.append("server_no_context_takeover")
+    # if client_notakeover:
+    #     enabledext.append('client_no_context_takeover')
+    return "; ".join(enabledext)
+
+
+class WSParserState(IntEnum):
+    READ_HEADER = 1
+    READ_PAYLOAD_LENGTH = 2
+    READ_PAYLOAD_MASK = 3
+    READ_PAYLOAD = 4
+
+
+class WebSocketReader:
+    def __init__(
+        self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
+    ) -> None:
+        self.queue = queue
+        self._max_msg_size = max_msg_size
+
+        self._exc: Optional[BaseException] = None
+        self._partial = bytearray()
+        self._state = WSParserState.READ_HEADER
+
+        self._opcode: Optional[int] = None
+        self._frame_fin = False
+        self._frame_opcode: Optional[int] = None
+        self._frame_payload = bytearray()
+
+        self._tail = b""
+        self._has_mask = False
+        self._frame_mask: Optional[bytes] = None
+        self._payload_length = 0
+        self._payload_length_flag = 0
+        self._compressed: Optional[bool] = None
+        self._decompressobj: Any = None  # zlib.decompressobj actually
+        self._compress = compress
+
+    def feed_eof(self) -> None:
+        self.queue.feed_eof()
+
+    def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
+        if self._exc:
+            return True, data
+
+        try:
+            return self._feed_data(data)
+        except Exception as exc:
+            self._exc = exc
+            self.queue.set_exception(exc)
+            return True, b""
+
+    def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
+        for fin, opcode, payload, compressed in self.parse_frame(data):
+            if compressed and not self._decompressobj:
+                self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
+            if opcode == WSMsgType.CLOSE:
+                if len(payload) >= 2:
+                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
+                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
+                        raise WebSocketError(
+                            WSCloseCode.PROTOCOL_ERROR,
+                            f"Invalid close code: {close_code}",
+                        )
+                    try:
+                        close_message = payload[2:].decode("utf-8")
+                    except UnicodeDecodeError as exc:
+                        raise WebSocketError(
+                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
+                        ) from exc
+                    msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
+                elif payload:
+                    raise WebSocketError(
+                        WSCloseCode.PROTOCOL_ERROR,
+                        f"Invalid close frame: {fin} {opcode} {payload!r}",
+                    )
+                else:
+                    msg = WSMessage(WSMsgType.CLOSE, 0, "")
+
+                self.queue.feed_data(msg, 0)
+
+            elif opcode == WSMsgType.PING:
+                self.queue.feed_data(
+                    WSMessage(WSMsgType.PING, payload, ""), len(payload)
+                )
+
+            elif opcode == WSMsgType.PONG:
+                self.queue.feed_data(
+                    WSMessage(WSMsgType.PONG, payload, ""), len(payload)
+                )
+
+            elif (
+                opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
+                and self._opcode is None
+            ):
+                raise WebSocketError(
+                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
+                )
+            else:
+                # load text/binary
+                if not fin:
+                    # got partial frame payload
+                    if opcode != WSMsgType.CONTINUATION:
+                        self._opcode = opcode
+                    self._partial.extend(payload)
+                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
+                        raise WebSocketError(
+                            WSCloseCode.MESSAGE_TOO_BIG,
+                            "Message size {} exceeds limit {}".format(
+                                len(self._partial), self._max_msg_size
+                            ),
+                        )
+                else:
+                    # The previous frame was not finished, so this frame is
+                    # expected to carry the continuation opcode.
+                    if self._partial:
+                        if opcode != WSMsgType.CONTINUATION:
+                            raise WebSocketError(
+                                WSCloseCode.PROTOCOL_ERROR,
+                                "The opcode in non-fin frame is expected "
+                                "to be zero, got {!r}".format(opcode),
+                            )
+
+                    if opcode == WSMsgType.CONTINUATION:
+                        assert self._opcode is not None
+                        opcode = self._opcode
+                        self._opcode = None
+
+                    self._partial.extend(payload)
+                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
+                        raise WebSocketError(
+                            WSCloseCode.MESSAGE_TOO_BIG,
+                            "Message size {} exceeds limit {}".format(
+                                len(self._partial), self._max_msg_size
+                            ),
+                        )
+
+                    # Decompression must be done only after all frames of
+                    # the message have been received.
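+                    # permessage-deflate (RFC 7692, section 7.2.2)
+                    # strips the trailing b"\x00\x00\xff\xff" from each
+                    # message, so it is re-appended before inflating.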
+ if compressed: + self._partial.extend(_WS_DEFLATE_TRAILING) + payload_merged = self._decompressobj.decompress( + self._partial, self._max_msg_size + ) + if self._decompressobj.unconsumed_tail: + left = len(self._decompressobj.unconsumed_tail) + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Decompressed message size {} exceeds limit {}".format( + self._max_msg_size + left, self._max_msg_size + ), + ) + else: + payload_merged = bytes(self._partial) + + self._partial.clear() + + if opcode == WSMsgType.TEXT: + try: + text = payload_merged.decode("utf-8") + self.queue.feed_data( + WSMessage(WSMsgType.TEXT, text, ""), len(text) + ) + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + else: + self.queue.feed_data( + WSMessage(WSMsgType.BINARY, payload_merged, ""), + len(payload_merged), + ) + + return False, b"" + + def parse_frame( + self, buf: bytes + ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: + """Return the next frame from the socket.""" + frames = [] + if self._tail: + buf, self._tail = self._tail + buf, b"" + + start_pos = 0 + buf_length = len(buf) + + while True: + # read header + if self._state == WSParserState.READ_HEADER: + if buf_length - start_pos >= 2: + data = buf[start_pos : start_pos + 2] + start_pos += 2 + first_byte, second_byte = data + + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = (first_byte >> 4) & 1 + opcode = first_byte & 0xF + + # frame-fin = %x0 ; more frames of this message follow + # / %x1 ; final frame of this message + # frame-rsv1 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv2 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv3 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # + # Remove rsv1 from this test for deflate development + if rsv2 or rsv3 or (rsv1 and not self._compress): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + if opcode > 0x7 and fin == 0: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received fragmented control frame", + ) + + has_mask = (second_byte >> 7) & 1 + length = second_byte & 0x7F + + # Control frames MUST have a payload + # length of 125 bytes or less + if opcode > 0x7 and length > 125: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Control frame payload cannot be " "larger than 125 bytes", + ) + + # Set compress status if last package is FIN + # OR set compress status if this is first fragment + # Raise error if not first fragment with rsv1 = 0x1 + if self._frame_fin or self._compressed is None: + self._compressed = True if rsv1 else False + elif rsv1: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + self._frame_fin = bool(fin) + self._frame_opcode = opcode + self._has_mask = bool(has_mask) + self._payload_length_flag = length + self._state = WSParserState.READ_PAYLOAD_LENGTH + else: + break + + # read payload length + if self._state == WSParserState.READ_PAYLOAD_LENGTH: + length = self._payload_length_flag + if length == 126: + if buf_length - start_pos >= 2: + data = buf[start_pos : start_pos + 2] + start_pos += 2 + length = UNPACK_LEN2(data)[0] + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + else: + break + elif length > 126: + if buf_length - start_pos >= 8: + data = 
buf[start_pos : start_pos + 8] + start_pos += 8 + length = UNPACK_LEN3(data)[0] + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + else: + break + else: + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + + # read payload mask + if self._state == WSParserState.READ_PAYLOAD_MASK: + if buf_length - start_pos >= 4: + self._frame_mask = buf[start_pos : start_pos + 4] + start_pos += 4 + self._state = WSParserState.READ_PAYLOAD + else: + break + + if self._state == WSParserState.READ_PAYLOAD: + length = self._payload_length + payload = self._frame_payload + + chunk_len = buf_length - start_pos + if length >= chunk_len: + self._payload_length = length - chunk_len + payload.extend(buf[start_pos:]) + start_pos = buf_length + else: + self._payload_length = 0 + payload.extend(buf[start_pos : start_pos + length]) + start_pos = start_pos + length + + if self._payload_length == 0: + if self._has_mask: + assert self._frame_mask is not None + _websocket_mask(self._frame_mask, payload) + + frames.append( + (self._frame_fin, self._frame_opcode, payload, self._compressed) + ) + + self._frame_payload = bytearray() + self._state = WSParserState.READ_HEADER + else: + break + + self._tail = buf[start_pos:] + + return frames + + +class WebSocketWriter: + def __init__( + self, + protocol: BaseProtocol, + transport: asyncio.Transport, + *, + use_mask: bool = False, + limit: int = DEFAULT_LIMIT, + random: Any = random.Random(), + compress: int = 0, + notakeover: bool = False, + ) -> None: + self.protocol = protocol + self.transport = transport + self.use_mask = use_mask + self.randrange = random.randrange + self.compress = compress + self.notakeover = notakeover + self._closing = False + self._limit = limit + self._output_size = 0 + self._compressobj: Any = None # actually compressobj + + async def _send_frame( + self, message: bytes, opcode: int, compress: Optional[int] = None + ) -> None: + """Send a frame over the websocket with message as its payload.""" + if self._closing and not (opcode & WSMsgType.CLOSE): + raise ConnectionResetError("Cannot write to closing transport") + + rsv = 0 + + # Only compress larger packets (disabled) + # Does small packet needs to be compressed? 
+ # if self.compress and opcode < 8 and len(message) > 124: + if (compress or self.compress) and opcode < 8: + if compress: + # Do not set self._compress if compressing is for this frame + compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress) + else: # self.compress + if not self._compressobj: + self._compressobj = zlib.compressobj( + level=zlib.Z_BEST_SPEED, wbits=-self.compress + ) + compressobj = self._compressobj + + message = compressobj.compress(message) + message = message + compressobj.flush( + zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH + ) + if message.endswith(_WS_DEFLATE_TRAILING): + message = message[:-4] + rsv = rsv | 0x40 + + msg_length = len(message) + + use_mask = self.use_mask + if use_mask: + mask_bit = 0x80 + else: + mask_bit = 0 + + if msg_length < 126: + header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) + elif msg_length < (1 << 16): + header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) + else: + header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) + if use_mask: + mask = self.randrange(0, 0xFFFFFFFF) + mask = mask.to_bytes(4, "big") + message = bytearray(message) + _websocket_mask(mask, message) + self._write(header + mask + message) + self._output_size += len(header) + len(mask) + len(message) + else: + if len(message) > MSG_SIZE: + self._write(header) + self._write(message) + else: + self._write(header + message) + + self._output_size += len(header) + len(message) + + if self._output_size > self._limit: + self._output_size = 0 + await self.protocol._drain_helper() + + def _write(self, data: bytes) -> None: + if self.transport is None or self.transport.is_closing(): + raise ConnectionResetError("Cannot write to closing transport") + self.transport.write(data) + + async def pong(self, message: bytes = b"") -> None: + """Send pong message.""" + if isinstance(message, str): + message = message.encode("utf-8") + await self._send_frame(message, WSMsgType.PONG) + + async def ping(self, message: bytes = b"") -> None: + """Send ping message.""" + if isinstance(message, str): + message = message.encode("utf-8") + await self._send_frame(message, WSMsgType.PING) + + async def send( + self, + message: Union[str, bytes], + binary: bool = False, + compress: Optional[int] = None, + ) -> None: + """Send a frame over the websocket with message as its payload.""" + if isinstance(message, str): + message = message.encode("utf-8") + if binary: + await self._send_frame(message, WSMsgType.BINARY, compress) + else: + await self._send_frame(message, WSMsgType.TEXT, compress) + + async def close(self, code: int = 1000, message: bytes = b"") -> None: + """Close the websocket, sending the specified code and message.""" + if isinstance(message, str): + message = message.encode("utf-8") + try: + await self._send_frame( + PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE + ) + finally: + self._closing = True diff --git a/venv/lib/python3.10/site-packages/aiohttp/http_writer.py b/venv/lib/python3.10/site-packages/aiohttp/http_writer.py new file mode 100644 index 0000000..73f0f96 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/http_writer.py @@ -0,0 +1,198 @@ +"""Http related parsers and protocol.""" + +import asyncio +import zlib +from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa + +from multidict import CIMultiDict + +from .abc import AbstractStreamWriter +from .base_protocol import BaseProtocol +from .helpers import NO_EXTENSIONS + +__all__ = ("StreamWriter", 
"HttpVersion", "HttpVersion10", "HttpVersion11") + + +class HttpVersion(NamedTuple): + major: int + minor: int + + +HttpVersion10 = HttpVersion(1, 0) +HttpVersion11 = HttpVersion(1, 1) + + +_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] +_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]] + + +class StreamWriter(AbstractStreamWriter): + def __init__( + self, + protocol: BaseProtocol, + loop: asyncio.AbstractEventLoop, + on_chunk_sent: _T_OnChunkSent = None, + on_headers_sent: _T_OnHeadersSent = None, + ) -> None: + self._protocol = protocol + + self.loop = loop + self.length = None + self.chunked = False + self.buffer_size = 0 + self.output_size = 0 + + self._eof = False + self._compress: Any = None + self._drain_waiter = None + + self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent + self._on_headers_sent: _T_OnHeadersSent = on_headers_sent + + @property + def transport(self) -> Optional[asyncio.Transport]: + return self._protocol.transport + + @property + def protocol(self) -> BaseProtocol: + return self._protocol + + def enable_chunking(self) -> None: + self.chunked = True + + def enable_compression( + self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + ) -> None: + zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS + self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy) + + def _write(self, chunk: bytes) -> None: + size = len(chunk) + self.buffer_size += size + self.output_size += size + transport = self.transport + if not self._protocol.connected or transport is None or transport.is_closing(): + raise ConnectionResetError("Cannot write to closing transport") + transport.write(chunk) + + async def write( + self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000 + ) -> None: + """Writes chunk of data to a stream. + + write_eof() indicates end of stream. + writer can't be used after write_eof() method being called. + write() return drain future. 
+ """ + if self._on_chunk_sent is not None: + await self._on_chunk_sent(chunk) + + if isinstance(chunk, memoryview): + if chunk.nbytes != len(chunk): + # just reshape it + chunk = chunk.cast("c") + + if self._compress is not None: + chunk = self._compress.compress(chunk) + if not chunk: + return + + if self.length is not None: + chunk_len = len(chunk) + if self.length >= chunk_len: + self.length = self.length - chunk_len + else: + chunk = chunk[: self.length] + self.length = 0 + if not chunk: + return + + if chunk: + if self.chunked: + chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len_pre + chunk + b"\r\n" + + self._write(chunk) + + if self.buffer_size > LIMIT and drain: + self.buffer_size = 0 + await self.drain() + + async def write_headers( + self, status_line: str, headers: "CIMultiDict[str]" + ) -> None: + """Write request/response status and headers.""" + if self._on_headers_sent is not None: + await self._on_headers_sent(headers) + + # status + headers + buf = _serialize_headers(status_line, headers) + self._write(buf) + + async def write_eof(self, chunk: bytes = b"") -> None: + if self._eof: + return + + if chunk and self._on_chunk_sent is not None: + await self._on_chunk_sent(chunk) + + if self._compress: + if chunk: + chunk = self._compress.compress(chunk) + + chunk = chunk + self._compress.flush() + if chunk and self.chunked: + chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" + else: + if self.chunked: + if chunk: + chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" + else: + chunk = b"0\r\n\r\n" + + if chunk: + self._write(chunk) + + await self.drain() + + self._eof = True + + async def drain(self) -> None: + """Flush the write buffer. + + The intended use is to write + + await w.write(data) + await w.drain() + """ + if self._protocol.transport is not None: + await self._protocol._drain_helper() + + +def _safe_header(string: str) -> str: + if "\r" in string or "\n" in string: + raise ValueError( + "Newline or carriage return detected in headers. " + "Potential header injection attack." + ) + return string + + +def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes: + headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items()) + line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n" + return line.encode("utf-8") + + +_serialize_headers = _py_serialize_headers + +try: + import aiohttp._http_writer as _http_writer # type: ignore[import] + + _c_serialize_headers = _http_writer._serialize_headers + if not NO_EXTENSIONS: + _serialize_headers = _c_serialize_headers +except ImportError: + pass diff --git a/venv/lib/python3.10/site-packages/aiohttp/locks.py b/venv/lib/python3.10/site-packages/aiohttp/locks.py new file mode 100644 index 0000000..de2dc83 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/locks.py @@ -0,0 +1,41 @@ +import asyncio +import collections +from typing import Any, Deque, Optional + + +class EventResultOrError: + """Event asyncio lock helper class. + + Wraps the Event asyncio lock allowing either to awake the + locked Tasks without any error or raising an exception. + + thanks to @vorpalsmith for the simple design. 
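+
+    Typical use: tasks block on ``wait()`` until another task calls
+    ``set()`` to wake them normally, or ``set(exc)`` to re-raise
+    ``exc`` in every waiter.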
+ """ + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._exc: Optional[BaseException] = None + self._event = asyncio.Event() + self._waiters: Deque[asyncio.Future[Any]] = collections.deque() + + def set(self, exc: Optional[BaseException] = None) -> None: + self._exc = exc + self._event.set() + + async def wait(self) -> Any: + waiter = self._loop.create_task(self._event.wait()) + self._waiters.append(waiter) + try: + val = await waiter + finally: + self._waiters.remove(waiter) + + if self._exc is not None: + raise self._exc + + return val + + def cancel(self) -> None: + """Cancel all waiters""" + for waiter in self._waiters: + waiter.cancel() diff --git a/venv/lib/python3.10/site-packages/aiohttp/log.py b/venv/lib/python3.10/site-packages/aiohttp/log.py new file mode 100644 index 0000000..3cecea2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/log.py @@ -0,0 +1,8 @@ +import logging + +access_logger = logging.getLogger("aiohttp.access") +client_logger = logging.getLogger("aiohttp.client") +internal_logger = logging.getLogger("aiohttp.internal") +server_logger = logging.getLogger("aiohttp.server") +web_logger = logging.getLogger("aiohttp.web") +ws_logger = logging.getLogger("aiohttp.websocket") diff --git a/venv/lib/python3.10/site-packages/aiohttp/multipart.py b/venv/lib/python3.10/site-packages/aiohttp/multipart.py new file mode 100644 index 0000000..73801f4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/multipart.py @@ -0,0 +1,961 @@ +import base64 +import binascii +import json +import re +import uuid +import warnings +import zlib +from collections import deque +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Deque, + Dict, + Iterator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +from urllib.parse import parse_qsl, unquote, urlencode + +from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping + +from .hdrs import ( + CONTENT_DISPOSITION, + CONTENT_ENCODING, + CONTENT_LENGTH, + CONTENT_TRANSFER_ENCODING, + CONTENT_TYPE, +) +from .helpers import CHAR, TOKEN, parse_mimetype, reify +from .http import HeadersParser +from .payload import ( + JsonPayload, + LookupError, + Order, + Payload, + StringPayload, + get_payload, + payload_type, +) +from .streams import StreamReader + +__all__ = ( + "MultipartReader", + "MultipartWriter", + "BodyPartReader", + "BadContentDispositionHeader", + "BadContentDispositionParam", + "parse_content_disposition", + "content_disposition_filename", +) + + +if TYPE_CHECKING: # pragma: no cover + from .client_reqrep import ClientResponse + + +class BadContentDispositionHeader(RuntimeWarning): + pass + + +class BadContentDispositionParam(RuntimeWarning): + pass + + +def parse_content_disposition( + header: Optional[str], +) -> Tuple[Optional[str], Dict[str, str]]: + def is_token(string: str) -> bool: + return bool(string) and TOKEN >= set(string) + + def is_quoted(string: str) -> bool: + return string[0] == string[-1] == '"' + + def is_rfc5987(string: str) -> bool: + return is_token(string) and string.count("'") == 2 + + def is_extended_param(string: str) -> bool: + return string.endswith("*") + + def is_continuous_param(string: str) -> bool: + pos = string.find("*") + 1 + if not pos: + return False + substring = string[pos:-1] if string.endswith("*") else string[pos:] + return substring.isdigit() + + def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str: + return 
re.sub(f"\\\\([{chars}])", "\\1", text) + + if not header: + return None, {} + + disptype, *parts = header.split(";") + if not is_token(disptype): + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params: Dict[str, str] = {} + while parts: + item = parts.pop(0) + + if "=" not in item: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + key, value = item.split("=", 1) + key = key.lower().strip() + value = value.lstrip() + + if key in params: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + if not is_token(key): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_continuous_param(key): + if is_quoted(value): + value = unescape(value[1:-1]) + elif not is_token(value): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_extended_param(key): + if is_rfc5987(value): + encoding, _, value = value.split("'", 2) + encoding = encoding or "utf-8" + else: + warnings.warn(BadContentDispositionParam(item)) + continue + + try: + value = unquote(value, encoding, "strict") + except UnicodeDecodeError: # pragma: nocover + warnings.warn(BadContentDispositionParam(item)) + continue + + else: + failed = True + if is_quoted(value): + failed = False + value = unescape(value[1:-1].lstrip("\\/")) + elif is_token(value): + failed = False + elif parts: + # maybe just ; in filename, in any case this is just + # one case fix, for proper fix we need to redesign parser + _value = f"{value};{parts[0]}" + if is_quoted(_value): + parts.pop(0) + value = unescape(_value[1:-1].lstrip("\\/")) + failed = False + + if failed: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params[key] = value + + return disptype.lower(), params + + +def content_disposition_filename( + params: Mapping[str, str], name: str = "filename" +) -> Optional[str]: + name_suf = "%s*" % name + if not params: + return None + elif name_suf in params: + return params[name_suf] + elif name in params: + return params[name] + else: + parts = [] + fnparams = sorted( + (key, value) for key, value in params.items() if key.startswith(name_suf) + ) + for num, (key, value) in enumerate(fnparams): + _, tail = key.split("*", 1) + if tail.endswith("*"): + tail = tail[:-1] + if tail == str(num): + parts.append(value) + else: + break + if not parts: + return None + value = "".join(parts) + if "'" in value: + encoding, _, value = value.split("'", 2) + encoding = encoding or "utf-8" + return unquote(value, encoding, "strict") + return value + + +class MultipartResponseWrapper: + """Wrapper around the MultipartReader. + + It takes care about + underlying connection and close it when it needs in. + """ + + def __init__( + self, + resp: "ClientResponse", + stream: "MultipartReader", + ) -> None: + self.resp = resp + self.stream = stream + + def __aiter__(self) -> "MultipartResponseWrapper": + return self + + async def __anext__( + self, + ) -> Union["MultipartReader", "BodyPartReader"]: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + def at_eof(self) -> bool: + """Returns True when all response data had been read.""" + return self.resp.content.at_eof() + + async def next( + self, + ) -> Optional[Union["MultipartReader", "BodyPartReader"]]: + """Emits next multipart reader object.""" + item = await self.stream.next() + if self.stream.at_eof(): + await self.release() + return item + + async def release(self) -> None: + """Release the connection gracefully. 
+
+        All remaining content is read to the void.
+        """
+        await self.resp.release()
+
+
+class BodyPartReader:
+    """Multipart reader for single body part."""
+
+    chunk_size = 8192
+
+    def __init__(
+        self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
+    ) -> None:
+        self.headers = headers
+        self._boundary = boundary
+        self._content = content
+        self._at_eof = False
+        length = self.headers.get(CONTENT_LENGTH, None)
+        self._length = int(length) if length is not None else None
+        self._read_bytes = 0
+        # TODO: typing.Deque is not supported by Python 3.5
+        self._unread: Deque[bytes] = deque()
+        self._prev_chunk: Optional[bytes] = None
+        self._content_eof = 0
+        self._cache: Dict[str, Any] = {}
+
+    def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
+        return self  # type: ignore[return-value]
+
+    async def __anext__(self) -> bytes:
+        part = await self.next()
+        if part is None:
+            raise StopAsyncIteration
+        return part
+
+    async def next(self) -> Optional[bytes]:
+        item = await self.read()
+        if not item:
+            return None
+        return item
+
+    async def read(self, *, decode: bool = False) -> bytes:
+        """Reads body part data.
+
+        decode: Decodes the data according to the Content-Encoding
+                header. If that header is missing, the data is
+                returned untouched.
+        """
+        if self._at_eof:
+            return b""
+        data = bytearray()
+        while not self._at_eof:
+            data.extend(await self.read_chunk(self.chunk_size))
+        if decode:
+            return self.decode(data)
+        return data
+
+    async def read_chunk(self, size: int = chunk_size) -> bytes:
+        """Reads body part content chunk of the specified size.
+
+        size: chunk size
+        """
+        if self._at_eof:
+            return b""
+        if self._length:
+            chunk = await self._read_chunk_from_length(size)
+        else:
+            chunk = await self._read_chunk_from_stream(size)
+
+        self._read_bytes += len(chunk)
+        if self._read_bytes == self._length:
+            self._at_eof = True
+        if self._at_eof:
+            crlf = await self._content.readline()
+            assert (
+                b"\r\n" == crlf
+            ), "reader did not read all the data or it is malformed"
+        return chunk
+
+    async def _read_chunk_from_length(self, size: int) -> bytes:
+        # Reads body part content chunk of the specified size.
+        # The body part must have a Content-Length header with a proper value.
+        assert self._length is not None, "Content-Length required for chunked read"
+        chunk_size = min(size, self._length - self._read_bytes)
+        chunk = await self._content.read(chunk_size)
+        return chunk
+
+    async def _read_chunk_from_stream(self, size: int) -> bytes:
+        # Reads content chunk of a body part with unknown length.
+        # A Content-Length header for the body part is not required.
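+        # Two consecutive reads are kept in a sliding window so that
+        # the b"\r\n--boundary" marker is found even when it straddles
+        # a read boundary.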
+ assert ( + size >= len(self._boundary) + 2 + ), "Chunk size must be greater or equal than boundary length + 2" + first_chunk = self._prev_chunk is None + if first_chunk: + self._prev_chunk = await self._content.read(size) + + chunk = await self._content.read(size) + self._content_eof += int(self._content.at_eof()) + assert self._content_eof < 3, "Reading after EOF" + assert self._prev_chunk is not None + window = self._prev_chunk + chunk + sub = b"\r\n" + self._boundary + if first_chunk: + idx = window.find(sub) + else: + idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub))) + if idx >= 0: + # pushing boundary back to content + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + self._content.unread_data(window[idx:]) + if size > idx: + self._prev_chunk = self._prev_chunk[:idx] + chunk = window[len(self._prev_chunk) : idx] + if not chunk: + self._at_eof = True + result = self._prev_chunk + self._prev_chunk = chunk + return result + + async def readline(self) -> bytes: + """Reads body part by line by line.""" + if self._at_eof: + return b"" + + if self._unread: + line = self._unread.popleft() + else: + line = await self._content.readline() + + if line.startswith(self._boundary): + # the very last boundary may not come with \r\n, + # so set single rules for everyone + sline = line.rstrip(b"\r\n") + boundary = self._boundary + last_boundary = self._boundary + b"--" + # ensure that we read exactly the boundary, not something alike + if sline == boundary or sline == last_boundary: + self._at_eof = True + self._unread.append(line) + return b"" + else: + next_line = await self._content.readline() + if next_line.startswith(self._boundary): + line = line[:-2] # strip CRLF but only once + self._unread.append(next_line) + + return line + + async def release(self) -> None: + """Like read(), but reads all the data to the void.""" + if self._at_eof: + return + while not self._at_eof: + await self.read_chunk(self.chunk_size) + + async def text(self, *, encoding: Optional[str] = None) -> str: + """Like read(), but assumes that body part contains text data.""" + data = await self.read(decode=True) + # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA + # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA + encoding = encoding or self.get_charset(default="utf-8") + return data.decode(encoding) + + async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]: + """Like read(), but assumes that body parts contains JSON data.""" + data = await self.read(decode=True) + if not data: + return None + encoding = encoding or self.get_charset(default="utf-8") + return cast(Dict[str, Any], json.loads(data.decode(encoding))) + + async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]: + """Like read(), but assumes that body parts contain form urlencoded data.""" + data = await self.read(decode=True) + if not data: + return [] + if encoding is not None: + real_encoding = encoding + else: + real_encoding = self.get_charset(default="utf-8") + return parse_qsl( + data.rstrip().decode(real_encoding), + keep_blank_values=True, + encoding=real_encoding, + ) + + def at_eof(self) -> bool: + """Returns True if the boundary was reached or False otherwise.""" + return self._at_eof + + def decode(self, data: bytes) -> bytes: + """Decodes data. + + Decoding is done according the specified Content-Encoding + or Content-Transfer-Encoding headers value. 
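+
+        For example, a part sent with Content-Transfer-Encoding: base64
+        and Content-Encoding: gzip is base64-decoded first and
+        gunzipped afterwards.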
+ """ + if CONTENT_TRANSFER_ENCODING in self.headers: + data = self._decode_content_transfer(data) + if CONTENT_ENCODING in self.headers: + return self._decode_content(data) + return data + + def _decode_content(self, data: bytes) -> bytes: + encoding = self.headers.get(CONTENT_ENCODING, "").lower() + + if encoding == "deflate": + return zlib.decompress(data, -zlib.MAX_WBITS) + elif encoding == "gzip": + return zlib.decompress(data, 16 + zlib.MAX_WBITS) + elif encoding == "identity": + return data + else: + raise RuntimeError(f"unknown content encoding: {encoding}") + + def _decode_content_transfer(self, data: bytes) -> bytes: + encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() + + if encoding == "base64": + return base64.b64decode(data) + elif encoding == "quoted-printable": + return binascii.a2b_qp(data) + elif encoding in ("binary", "8bit", "7bit"): + return data + else: + raise RuntimeError( + "unknown content transfer encoding: {}" "".format(encoding) + ) + + def get_charset(self, default: str) -> str: + """Returns charset parameter from Content-Type header or default.""" + ctype = self.headers.get(CONTENT_TYPE, "") + mimetype = parse_mimetype(ctype) + return mimetype.parameters.get("charset", default) + + @reify + def name(self) -> Optional[str]: + """Returns name specified in Content-Disposition header. + + If the header is missing or malformed, returns None. + """ + _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params, "name") + + @reify + def filename(self) -> Optional[str]: + """Returns filename specified in Content-Disposition header. + + Returns None if the header is missing or malformed. + """ + _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params, "filename") + + +@payload_type(BodyPartReader, order=Order.try_first) +class BodyPartReaderPayload(Payload): + def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: + super().__init__(value, *args, **kwargs) + + params: Dict[str, str] = {} + if value.name is not None: + params["name"] = value.name + if value.filename is not None: + params["filename"] = value.filename + + if params: + self.set_content_disposition("attachment", True, **params) + + async def write(self, writer: Any) -> None: + field = self._value + chunk = await field.read_chunk(size=2**16) + while chunk: + await writer.write(field.decode(chunk)) + chunk = await field.read_chunk(size=2**16) + + +class MultipartReader: + """Multipart body reader.""" + + #: Response wrapper, used when multipart readers constructs from response. + response_wrapper_cls = MultipartResponseWrapper + #: Multipart reader class, used to handle multipart/* body parts. + #: None points to type(self) + multipart_reader_cls = None + #: Body part reader class for non multipart/* content types. 
+ part_reader_cls = BodyPartReader + + def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: + self.headers = headers + self._boundary = ("--" + self._get_boundary()).encode() + self._content = content + self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None + self._at_eof = False + self._at_bof = True + self._unread: List[bytes] = [] + + def __aiter__( + self, + ) -> AsyncIterator["BodyPartReader"]: + return self # type: ignore[return-value] + + async def __anext__( + self, + ) -> Optional[Union["MultipartReader", BodyPartReader]]: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + @classmethod + def from_response( + cls, + response: "ClientResponse", + ) -> MultipartResponseWrapper: + """Constructs reader instance from HTTP response. + + :param response: :class:`~aiohttp.client.ClientResponse` instance + """ + obj = cls.response_wrapper_cls( + response, cls(response.headers, response.content) + ) + return obj + + def at_eof(self) -> bool: + """Returns True if the final boundary was reached, false otherwise.""" + return self._at_eof + + async def next( + self, + ) -> Optional[Union["MultipartReader", BodyPartReader]]: + """Emits the next multipart body part.""" + # So, if we're at BOF, we need to skip till the boundary. + if self._at_eof: + return None + await self._maybe_release_last_part() + if self._at_bof: + await self._read_until_first_boundary() + self._at_bof = False + else: + await self._read_boundary() + if self._at_eof: # we just read the last boundary, nothing to do there + return None + self._last_part = await self.fetch_next_part() + return self._last_part + + async def release(self) -> None: + """Reads all the body parts to the void till the final boundary.""" + while not self._at_eof: + item = await self.next() + if item is None: + break + await item.release() + + async def fetch_next_part( + self, + ) -> Union["MultipartReader", BodyPartReader]: + """Returns the next body part reader.""" + headers = await self._read_headers() + return self._get_part_reader(headers) + + def _get_part_reader( + self, + headers: "CIMultiDictProxy[str]", + ) -> Union["MultipartReader", BodyPartReader]: + """Dispatches the response by the `Content-Type` header. + + Returns a suitable reader instance. 
+ + :param dict headers: Response headers + """ + ctype = headers.get(CONTENT_TYPE, "") + mimetype = parse_mimetype(ctype) + + if mimetype.type == "multipart": + if self.multipart_reader_cls is None: + return type(self)(headers, self._content) + return self.multipart_reader_cls(headers, self._content) + else: + return self.part_reader_cls(self._boundary, headers, self._content) + + def _get_boundary(self) -> str: + mimetype = parse_mimetype(self.headers[CONTENT_TYPE]) + + assert mimetype.type == "multipart", "multipart/* content type expected" + + if "boundary" not in mimetype.parameters: + raise ValueError( + "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE] + ) + + boundary = mimetype.parameters["boundary"] + if len(boundary) > 70: + raise ValueError("boundary %r is too long (70 chars max)" % boundary) + + return boundary + + async def _readline(self) -> bytes: + if self._unread: + return self._unread.pop() + return await self._content.readline() + + async def _read_until_first_boundary(self) -> None: + while True: + chunk = await self._readline() + if chunk == b"": + raise ValueError( + "Could not find starting boundary %r" % (self._boundary) + ) + chunk = chunk.rstrip() + if chunk == self._boundary: + return + elif chunk == self._boundary + b"--": + self._at_eof = True + return + + async def _read_boundary(self) -> None: + chunk = (await self._readline()).rstrip() + if chunk == self._boundary: + pass + elif chunk == self._boundary + b"--": + self._at_eof = True + epilogue = await self._readline() + next_line = await self._readline() + + # the epilogue is expected and then either the end of input or the + # parent multipart boundary, if the parent boundary is found then + # it should be marked as unread and handed to the parent for + # processing + if next_line[:2] == b"--": + self._unread.append(next_line) + # otherwise the request is likely missing an epilogue and both + # lines should be passed to the parent for processing + # (this handles the old behavior gracefully) + else: + self._unread.extend([next_line, epilogue]) + else: + raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}") + + async def _read_headers(self) -> "CIMultiDictProxy[str]": + lines = [b""] + while True: + chunk = await self._content.readline() + chunk = chunk.strip() + lines.append(chunk) + if not chunk: + break + parser = HeadersParser() + headers, raw_headers = parser.parse_headers(lines) + return headers + + async def _maybe_release_last_part(self) -> None: + """Ensures that the last read body part is read completely.""" + if self._last_part is not None: + if not self._last_part.at_eof(): + await self._last_part.release() + self._unread.extend(self._last_part._unread) + self._last_part = None + + +_Part = Tuple[Payload, str, str] + + +class MultipartWriter(Payload): + """Multipart body writer.""" + + def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: + boundary = boundary if boundary is not None else uuid.uuid4().hex + # The underlying Payload API demands a str (utf-8), not bytes, + # so we need to ensure we don't lose anything during conversion. + # As a result, require the boundary to be ASCII only. + # In both situations. 
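+        # e.g. boundary "a1b2c3" frames each part as
+        #   b"--a1b2c3\r\n<headers>\r\n\r\n<body>\r\n"
+        # and the whole payload ends with b"--a1b2c3--\r\n".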
+ + try: + self._boundary = boundary.encode("ascii") + except UnicodeEncodeError: + raise ValueError("boundary should contain ASCII only chars") from None + ctype = f"multipart/{subtype}; boundary={self._boundary_value}" + + super().__init__(None, content_type=ctype) + + self._parts: List[_Part] = [] + + def __enter__(self) -> "MultipartWriter": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + pass + + def __iter__(self) -> Iterator[_Part]: + return iter(self._parts) + + def __len__(self) -> int: + return len(self._parts) + + def __bool__(self) -> bool: + return True + + _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z") + _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]") + + @property + def _boundary_value(self) -> str: + """Wrap boundary parameter value in quotes, if necessary. + + Reads self.boundary and returns a unicode sting. + """ + # Refer to RFCs 7231, 7230, 5234. + # + # parameter = token "=" ( token / quoted-string ) + # token = 1*tchar + # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE + # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text + # obs-text = %x80-FF + # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) + # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + # / DIGIT / ALPHA + # ; any VCHAR, except delimiters + # VCHAR = %x21-7E + value = self._boundary + if re.match(self._valid_tchar_regex, value): + return value.decode("ascii") # cannot fail + + if re.search(self._invalid_qdtext_char_regex, value): + raise ValueError("boundary value contains invalid characters") + + # escape %x5C and %x22 + quoted_value_content = value.replace(b"\\", b"\\\\") + quoted_value_content = quoted_value_content.replace(b'"', b'\\"') + + return '"' + quoted_value_content.decode("ascii") + '"' + + @property + def boundary(self) -> str: + return self._boundary.decode("ascii") + + def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload: + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Payload): + obj.headers.update(headers) + return self.append_payload(obj) + else: + try: + payload = get_payload(obj, headers=headers) + except LookupError: + raise TypeError("Cannot create payload from %r" % obj) + else: + return self.append_payload(payload) + + def append_payload(self, payload: Payload) -> Payload: + """Adds a new body part to multipart writer.""" + # compression + encoding: Optional[str] = payload.headers.get( + CONTENT_ENCODING, + "", + ).lower() + if encoding and encoding not in ("deflate", "gzip", "identity"): + raise RuntimeError(f"unknown content encoding: {encoding}") + if encoding == "identity": + encoding = None + + # te encoding + te_encoding: Optional[str] = payload.headers.get( + CONTENT_TRANSFER_ENCODING, + "", + ).lower() + if te_encoding not in ("", "base64", "quoted-printable", "binary"): + raise RuntimeError( + "unknown content transfer encoding: {}" "".format(te_encoding) + ) + if te_encoding == "binary": + te_encoding = None + + # size + size = payload.size + if size is not None and not (encoding or te_encoding): + payload.headers[CONTENT_LENGTH] = str(size) + + self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] + return payload + + def append_json( + self, obj: Any, headers: Optional[MultiMapping[str]] = None + ) -> Payload: + """Helper to append JSON part.""" + if headers is None: + 
headers = CIMultiDict() + + return self.append_payload(JsonPayload(obj, headers=headers)) + + def append_form( + self, + obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], + headers: Optional[MultiMapping[str]] = None, + ) -> Payload: + """Helper to append form urlencoded part.""" + assert isinstance(obj, (Sequence, Mapping)) + + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Mapping): + obj = list(obj.items()) + data = urlencode(obj, doseq=True) + + return self.append_payload( + StringPayload( + data, headers=headers, content_type="application/x-www-form-urlencoded" + ) + ) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + total = 0 + for part, encoding, te_encoding in self._parts: + if encoding or te_encoding or part.size is None: + return None + + total += int( + 2 + + len(self._boundary) + + 2 + + part.size # b'--'+self._boundary+b'\r\n' + + len(part._binary_headers) + + 2 # b'\r\n' + ) + + total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' + return total + + async def write(self, writer: Any, close_boundary: bool = True) -> None: + """Write body.""" + for part, encoding, te_encoding in self._parts: + await writer.write(b"--" + self._boundary + b"\r\n") + await writer.write(part._binary_headers) + + if encoding or te_encoding: + w = MultipartPayloadWriter(writer) + if encoding: + w.enable_compression(encoding) + if te_encoding: + w.enable_encoding(te_encoding) + await part.write(w) # type: ignore[arg-type] + await w.write_eof() + else: + await part.write(writer) + + await writer.write(b"\r\n") + + if close_boundary: + await writer.write(b"--" + self._boundary + b"--\r\n") + + +class MultipartPayloadWriter: + def __init__(self, writer: Any) -> None: + self._writer = writer + self._encoding: Optional[str] = None + self._compress: Any = None + self._encoding_buffer: Optional[bytearray] = None + + def enable_encoding(self, encoding: str) -> None: + if encoding == "base64": + self._encoding = encoding + self._encoding_buffer = bytearray() + elif encoding == "quoted-printable": + self._encoding = "quoted-printable" + + def enable_compression( + self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + ) -> None: + zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS + self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy) + + async def write_eof(self) -> None: + if self._compress is not None: + chunk = self._compress.flush() + if chunk: + self._compress = None + await self.write(chunk) + + if self._encoding == "base64": + if self._encoding_buffer: + await self._writer.write(base64.b64encode(self._encoding_buffer)) + + async def write(self, chunk: bytes) -> None: + if self._compress is not None: + if chunk: + chunk = self._compress.compress(chunk) + if not chunk: + return + + if self._encoding == "base64": + buf = self._encoding_buffer + assert buf is not None + buf.extend(chunk) + + if buf: + div, mod = divmod(len(buf), 3) + enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :]) + if enc_chunk: + b64chunk = base64.b64encode(enc_chunk) + await self._writer.write(b64chunk) + elif self._encoding == "quoted-printable": + await self._writer.write(binascii.b2a_qp(chunk)) + else: + await self._writer.write(chunk) diff --git a/venv/lib/python3.10/site-packages/aiohttp/payload.py b/venv/lib/python3.10/site-packages/aiohttp/payload.py new file mode 100644 index 0000000..625b2ea --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/payload.py @@ -0,0 
+1,465 @@ +import asyncio +import enum +import io +import json +import mimetypes +import os +import warnings +from abc import ABC, abstractmethod +from itertools import chain +from typing import ( + IO, + TYPE_CHECKING, + Any, + ByteString, + Dict, + Iterable, + Optional, + TextIO, + Tuple, + Type, + Union, +) + +from multidict import CIMultiDict + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ( + PY_36, + content_disposition_header, + guess_filename, + parse_mimetype, + sentinel, +) +from .streams import StreamReader +from .typedefs import Final, JSONEncoder, _CIMultiDict + +__all__ = ( + "PAYLOAD_REGISTRY", + "get_payload", + "payload_type", + "Payload", + "BytesPayload", + "StringPayload", + "IOBasePayload", + "BytesIOPayload", + "BufferedReaderPayload", + "TextIOPayload", + "StringIOPayload", + "JsonPayload", + "AsyncIterablePayload", +) + +TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB + +if TYPE_CHECKING: # pragma: no cover + from typing import List + + +class LookupError(Exception): + pass + + +class Order(str, enum.Enum): + normal = "normal" + try_first = "try_first" + try_last = "try_last" + + +def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload": + return PAYLOAD_REGISTRY.get(data, *args, **kwargs) + + +def register_payload( + factory: Type["Payload"], type: Any, *, order: Order = Order.normal +) -> None: + PAYLOAD_REGISTRY.register(factory, type, order=order) + + +class payload_type: + def __init__(self, type: Any, *, order: Order = Order.normal) -> None: + self.type = type + self.order = order + + def __call__(self, factory: Type["Payload"]) -> Type["Payload"]: + register_payload(factory, self.type, order=self.order) + return factory + + +PayloadType = Type["Payload"] +_PayloadRegistryItem = Tuple[PayloadType, Any] + + +class PayloadRegistry: + """Payload registry. 
+ + note: we need zope.interface for more efficient adapter search + """ + + def __init__(self) -> None: + self._first: List[_PayloadRegistryItem] = [] + self._normal: List[_PayloadRegistryItem] = [] + self._last: List[_PayloadRegistryItem] = [] + + def get( + self, + data: Any, + *args: Any, + _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain, + **kwargs: Any, + ) -> "Payload": + if isinstance(data, Payload): + return data + for factory, type in _CHAIN(self._first, self._normal, self._last): + if isinstance(data, type): + return factory(data, *args, **kwargs) + + raise LookupError() + + def register( + self, factory: PayloadType, type: Any, *, order: Order = Order.normal + ) -> None: + if order is Order.try_first: + self._first.append((factory, type)) + elif order is Order.normal: + self._normal.append((factory, type)) + elif order is Order.try_last: + self._last.append((factory, type)) + else: + raise ValueError(f"Unsupported order {order!r}") + + +class Payload(ABC): + + _default_content_type: str = "application/octet-stream" + _size: Optional[int] = None + + def __init__( + self, + value: Any, + headers: Optional[ + Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]] + ] = None, + content_type: Optional[str] = sentinel, + filename: Optional[str] = None, + encoding: Optional[str] = None, + **kwargs: Any, + ) -> None: + self._encoding = encoding + self._filename = filename + self._headers: _CIMultiDict = CIMultiDict() + self._value = value + if content_type is not sentinel and content_type is not None: + self._headers[hdrs.CONTENT_TYPE] = content_type + elif self._filename is not None: + content_type = mimetypes.guess_type(self._filename)[0] + if content_type is None: + content_type = self._default_content_type + self._headers[hdrs.CONTENT_TYPE] = content_type + else: + self._headers[hdrs.CONTENT_TYPE] = self._default_content_type + self._headers.update(headers or {}) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + return self._size + + @property + def filename(self) -> Optional[str]: + """Filename of the payload.""" + return self._filename + + @property + def headers(self) -> _CIMultiDict: + """Custom item headers""" + return self._headers + + @property + def _binary_headers(self) -> bytes: + return ( + "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode( + "utf-8" + ) + + b"\r\n" + ) + + @property + def encoding(self) -> Optional[str]: + """Payload encoding""" + return self._encoding + + @property + def content_type(self) -> str: + """Content type""" + return self._headers[hdrs.CONTENT_TYPE] + + def set_content_disposition( + self, + disptype: str, + quote_fields: bool = True, + _charset: str = "utf-8", + **params: Any, + ) -> None: + """Sets ``Content-Disposition`` header.""" + self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header( + disptype, quote_fields=quote_fields, _charset=_charset, **params + ) + + @abstractmethod + async def write(self, writer: AbstractStreamWriter) -> None: + """Write payload. 
+ + writer is an AbstractStreamWriter instance: + """ + + +class BytesPayload(Payload): + def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None: + if not isinstance(value, (bytes, bytearray, memoryview)): + raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") + + if "content_type" not in kwargs: + kwargs["content_type"] = "application/octet-stream" + + super().__init__(value, *args, **kwargs) + + if isinstance(value, memoryview): + self._size = value.nbytes + else: + self._size = len(value) + + if self._size > TOO_LARGE_BYTES_BODY: + if PY_36: + kwargs = {"source": self} + else: + kwargs = {} + warnings.warn( + "Sending a large body directly with raw bytes might" + " lock the event loop. You should probably pass an " + "io.BytesIO object instead", + ResourceWarning, + **kwargs, + ) + + async def write(self, writer: AbstractStreamWriter) -> None: + await writer.write(self._value) + + +class StringPayload(BytesPayload): + def __init__( + self, + value: str, + *args: Any, + encoding: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any, + ) -> None: + + if encoding is None: + if content_type is None: + real_encoding = "utf-8" + content_type = "text/plain; charset=utf-8" + else: + mimetype = parse_mimetype(content_type) + real_encoding = mimetype.parameters.get("charset", "utf-8") + else: + if content_type is None: + content_type = "text/plain; charset=%s" % encoding + real_encoding = encoding + + super().__init__( + value.encode(real_encoding), + encoding=real_encoding, + content_type=content_type, + *args, + **kwargs, + ) + + +class StringIOPayload(StringPayload): + def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: + super().__init__(value.read(), *args, **kwargs) + + +class IOBasePayload(Payload): + _value: IO[Any] + + def __init__( + self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any + ) -> None: + if "filename" not in kwargs: + kwargs["filename"] = guess_filename(value) + + super().__init__(value, *args, **kwargs) + + if self._filename is not None and disposition is not None: + if hdrs.CONTENT_DISPOSITION not in self.headers: + self.set_content_disposition(disposition, filename=self._filename) + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + while chunk: + await writer.write(chunk) + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + finally: + await loop.run_in_executor(None, self._value.close) + + +class TextIOPayload(IOBasePayload): + _value: TextIO + + def __init__( + self, + value: TextIO, + *args: Any, + encoding: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any, + ) -> None: + + if encoding is None: + if content_type is None: + encoding = "utf-8" + content_type = "text/plain; charset=utf-8" + else: + mimetype = parse_mimetype(content_type) + encoding = mimetype.parameters.get("charset", "utf-8") + else: + if content_type is None: + content_type = "text/plain; charset=%s" % encoding + + super().__init__( + value, + content_type=content_type, + encoding=encoding, + *args, + **kwargs, + ) + + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except OSError: + return None + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor(None, self._value.read, 
2**16)
+            while chunk:
+                data = (
+                    chunk.encode(encoding=self._encoding)
+                    if self._encoding
+                    else chunk.encode()
+                )
+                await writer.write(data)
+                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
+        finally:
+            await loop.run_in_executor(None, self._value.close)
+
+
+class BytesIOPayload(IOBasePayload):
+    @property
+    def size(self) -> int:
+        position = self._value.tell()
+        end = self._value.seek(0, os.SEEK_END)
+        self._value.seek(position)
+        return end - position
+
+
+class BufferedReaderPayload(IOBasePayload):
+    @property
+    def size(self) -> Optional[int]:
+        try:
+            return os.fstat(self._value.fileno()).st_size - self._value.tell()
+        except OSError:
+            # data.fileno() is not supported, e.g.
+            # io.BufferedReader(io.BytesIO(b'data'))
+            return None
+
+
+class JsonPayload(BytesPayload):
+    def __init__(
+        self,
+        value: Any,
+        encoding: str = "utf-8",
+        content_type: str = "application/json",
+        dumps: JSONEncoder = json.dumps,
+        *args: Any,
+        **kwargs: Any,
+    ) -> None:
+
+        super().__init__(
+            dumps(value).encode(encoding),
+            content_type=content_type,
+            encoding=encoding,
+            *args,
+            **kwargs,
+        )
+
+
+if TYPE_CHECKING:  # pragma: no cover
+    from typing import AsyncIterable, AsyncIterator
+
+    _AsyncIterator = AsyncIterator[bytes]
+    _AsyncIterable = AsyncIterable[bytes]
+else:
+    from collections.abc import AsyncIterable, AsyncIterator
+
+    _AsyncIterator = AsyncIterator
+    _AsyncIterable = AsyncIterable
+
+
+class AsyncIterablePayload(Payload):
+
+    _iter: Optional[_AsyncIterator] = None
+
+    def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
+        if not isinstance(value, AsyncIterable):
+            raise TypeError(
+                "value argument must support "
+                "collections.abc.AsyncIterable interface, "
+                "got {!r}".format(type(value))
+            )
+
+        if "content_type" not in kwargs:
+            kwargs["content_type"] = "application/octet-stream"
+
+        super().__init__(value, *args, **kwargs)
+
+        self._iter = value.__aiter__()
+
+    async def write(self, writer: AbstractStreamWriter) -> None:
+        if self._iter:
+            try:
+                # the "iter is not None" check prevents rare cases
+                # when the same iterable is used twice
+                while True:
+                    chunk = await self._iter.__anext__()
+                    await writer.write(chunk)
+            except StopAsyncIteration:
+                self._iter = None
+
+
+class StreamReaderPayload(AsyncIterablePayload):
+    def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
+        super().__init__(value.iter_any(), *args, **kwargs)
+
+
+PAYLOAD_REGISTRY = PayloadRegistry()
+PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
+PAYLOAD_REGISTRY.register(StringPayload, str)
+PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
+PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
+PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
+PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
+PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
+PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
+# try_last gives more specialized async iterables like
+# multipart.BodyPartReaderPayload a chance to override the default
+PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
diff --git a/venv/lib/python3.10/site-packages/aiohttp/payload_streamer.py b/venv/lib/python3.10/site-packages/aiohttp/payload_streamer.py
new file mode 100644
index 0000000..9f8b8bc
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/aiohttp/payload_streamer.py
@@ -0,0 +1,75 @@
+"""
+Payload implementation for coroutines as data provider.
+ +As a simple case, you can upload data from file:: + + @aiohttp.streamer + async def file_sender(writer, file_name=None): + with open(file_name, 'rb') as f: + chunk = f.read(2**16) + while chunk: + await writer.write(chunk) + + chunk = f.read(2**16) + +Then you can use `file_sender` like this: + + async with session.post('http://httpbin.org/post', + data=file_sender(file_name='huge_file')) as resp: + print(await resp.text()) + +..note:: Coroutine must accept `writer` as first argument + +""" + +import types +import warnings +from typing import Any, Awaitable, Callable, Dict, Tuple + +from .abc import AbstractStreamWriter +from .payload import Payload, payload_type + +__all__ = ("streamer",) + + +class _stream_wrapper: + def __init__( + self, + coro: Callable[..., Awaitable[None]], + args: Tuple[Any, ...], + kwargs: Dict[str, Any], + ) -> None: + self.coro = types.coroutine(coro) + self.args = args + self.kwargs = kwargs + + async def __call__(self, writer: AbstractStreamWriter) -> None: + await self.coro(writer, *self.args, **self.kwargs) # type: ignore[operator] + + +class streamer: + def __init__(self, coro: Callable[..., Awaitable[None]]) -> None: + warnings.warn( + "@streamer is deprecated, use async generators instead", + DeprecationWarning, + stacklevel=2, + ) + self.coro = coro + + def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper: + return _stream_wrapper(self.coro, args, kwargs) + + +@payload_type(_stream_wrapper) +class StreamWrapperPayload(Payload): + async def write(self, writer: AbstractStreamWriter) -> None: + await self._value(writer) + + +@payload_type(streamer) +class StreamPayload(StreamWrapperPayload): + def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None: + super().__init__(value(), *args, **kwargs) + + async def write(self, writer: AbstractStreamWriter) -> None: + await self._value(writer) diff --git a/venv/lib/python3.10/site-packages/aiohttp/py.typed b/venv/lib/python3.10/site-packages/aiohttp/py.typed new file mode 100644 index 0000000..f5642f7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/py.typed @@ -0,0 +1 @@ +Marker diff --git a/venv/lib/python3.10/site-packages/aiohttp/pytest_plugin.py b/venv/lib/python3.10/site-packages/aiohttp/pytest_plugin.py new file mode 100644 index 0000000..dd9a9f6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/pytest_plugin.py @@ -0,0 +1,391 @@ +import asyncio +import contextlib +import warnings +from collections.abc import Callable +from typing import Any, Awaitable, Callable, Dict, Generator, Optional, Union + +import pytest + +from aiohttp.helpers import PY_37, isasyncgenfunction +from aiohttp.web import Application + +from .test_utils import ( + BaseTestServer, + RawTestServer, + TestClient, + TestServer, + loop_context, + setup_test_loop, + teardown_test_loop, + unused_port as _unused_port, +) + +try: + import uvloop +except ImportError: # pragma: no cover + uvloop = None + +try: + import tokio +except ImportError: # pragma: no cover + tokio = None + +AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]] + + +def pytest_addoption(parser): # type: ignore[no-untyped-def] + parser.addoption( + "--aiohttp-fast", + action="store_true", + default=False, + help="run tests faster by disabling extra checks", + ) + parser.addoption( + "--aiohttp-loop", + action="store", + default="pyloop", + help="run tests with specific loop: pyloop, uvloop, tokio or all", + ) + parser.addoption( + "--aiohttp-enable-loop-debug", + action="store_true", + 
default=False,
+        help="enable event loop debug mode",
+    )
+
+
+def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
+    """Set up pytest fixture.
+
+    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
+    """
+    func = fixturedef.func
+
+    if isasyncgenfunction(func):
+        # async generator fixture
+        is_async_gen = True
+    elif asyncio.iscoroutinefunction(func):
+        # regular async fixture
+        is_async_gen = False
+    else:
+        # not an async fixture, nothing to do
+        return
+
+    strip_request = False
+    if "request" not in fixturedef.argnames:
+        fixturedef.argnames += ("request",)
+        strip_request = True
+
+    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
+        request = kwargs["request"]
+        if strip_request:
+            del kwargs["request"]
+
+        # if neither the fixture nor the test use the 'loop' fixture,
+        # 'getfixturevalue' will fail because the test is not parameterized
+        # (this can be removed someday if 'loop' is no longer parameterized)
+        if "loop" not in request.fixturenames:
+            raise Exception(
+                "Asynchronous fixtures must depend on the 'loop' fixture or "
+                "be used in tests depending on it."
+            )
+
+        _loop = request.getfixturevalue("loop")
+
+        if is_async_gen:
+            # for async generators, we need to advance the generator once,
+            # then advance it again in a finalizer
+            gen = func(*args, **kwargs)
+
+            def finalizer():  # type: ignore[no-untyped-def]
+                try:
+                    return _loop.run_until_complete(gen.__anext__())
+                except StopAsyncIteration:
+                    pass
+
+            request.addfinalizer(finalizer)
+            return _loop.run_until_complete(gen.__anext__())
+        else:
+            return _loop.run_until_complete(func(*args, **kwargs))
+
+    fixturedef.func = wrapper
+
+
+@pytest.fixture
+def fast(request):  # type: ignore[no-untyped-def]
+    """--aiohttp-fast config option"""
+    return request.config.getoption("--aiohttp-fast")
+
+
+@pytest.fixture
+def loop_debug(request):  # type: ignore[no-untyped-def]
+    """--aiohttp-enable-loop-debug config option"""
+    return request.config.getoption("--aiohttp-enable-loop-debug")
+
+
+@contextlib.contextmanager
+def _runtime_warning_context():  # type: ignore[no-untyped-def]
+    """Context manager which checks for RuntimeWarnings.
+
+    This exists specifically to
+    avoid "coroutine 'X' was never awaited" warnings being missed.
+
+    If RuntimeWarnings occur in the context a RuntimeError is raised.
+    """
+    with warnings.catch_warnings(record=True) as _warnings:
+        yield
+        rw = [
+            "{w.filename}:{w.lineno}:{w.message}".format(w=w)
+            for w in _warnings
+            if w.category == RuntimeWarning
+        ]
+        if rw:
+            raise RuntimeError(
+                "{} Runtime Warning{},\n{}".format(
+                    len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
+                )
+            )
+
+
+@contextlib.contextmanager
+def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]
+    """Passthrough loop context.
+
+    Sets up and tears down a loop unless one is passed in via the loop
+    argument, in which case it is passed straight through.
+ """ + if loop: + # loop already exists, pass it straight through + yield loop + else: + # this shadows loop_context's standard behavior + loop = setup_test_loop() + yield loop + teardown_test_loop(loop, fast=fast) + + +def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def] + """Fix pytest collecting for coroutines.""" + if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj): + return list(collector._genfunctions(name, obj)) + + +def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def] + """Run coroutines in an event loop instead of a normal function call.""" + fast = pyfuncitem.config.getoption("--aiohttp-fast") + if asyncio.iscoroutinefunction(pyfuncitem.function): + existing_loop = pyfuncitem.funcargs.get( + "proactor_loop" + ) or pyfuncitem.funcargs.get("loop", None) + with _runtime_warning_context(): + with _passthrough_loop_context(existing_loop, fast=fast) as _loop: + testargs = { + arg: pyfuncitem.funcargs[arg] + for arg in pyfuncitem._fixtureinfo.argnames + } + _loop.run_until_complete(pyfuncitem.obj(**testargs)) + + return True + + +def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def] + if "loop_factory" not in metafunc.fixturenames: + return + + loops = metafunc.config.option.aiohttp_loop + avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy} + + if uvloop is not None: # pragma: no cover + avail_factories["uvloop"] = uvloop.EventLoopPolicy + + if tokio is not None: # pragma: no cover + avail_factories["tokio"] = tokio.EventLoopPolicy + + if loops == "all": + loops = "pyloop,uvloop?,tokio?" + + factories = {} # type: ignore[var-annotated] + for name in loops.split(","): + required = not name.endswith("?") + name = name.strip(" ?") + if name not in avail_factories: # pragma: no cover + if required: + raise ValueError( + "Unknown loop '%s', available loops: %s" + % (name, list(factories.keys())) + ) + else: + continue + factories[name] = avail_factories[name] + metafunc.parametrize( + "loop_factory", list(factories.values()), ids=list(factories.keys()) + ) + + +@pytest.fixture +def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def] + """Return an instance of the event loop.""" + policy = loop_factory() + asyncio.set_event_loop_policy(policy) + with loop_context(fast=fast) as _loop: + if loop_debug: + _loop.set_debug(True) # pragma: no cover + asyncio.set_event_loop(_loop) + yield _loop + + +@pytest.fixture +def proactor_loop(): # type: ignore[no-untyped-def] + if not PY_37: + policy = asyncio.get_event_loop_policy() + policy._loop_factory = asyncio.ProactorEventLoop # type: ignore[attr-defined] + else: + policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined] + asyncio.set_event_loop_policy(policy) + + with loop_context(policy.new_event_loop) as _loop: + asyncio.set_event_loop(_loop) + yield _loop + + +@pytest.fixture +def unused_port(aiohttp_unused_port): # type: ignore[no-untyped-def] # pragma: no cover + warnings.warn( + "Deprecated, use aiohttp_unused_port fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_unused_port + + +@pytest.fixture +def aiohttp_unused_port(): # type: ignore[no-untyped-def] + """Return a port that is unused on the current host.""" + return _unused_port + + +@pytest.fixture +def aiohttp_server(loop): # type: ignore[no-untyped-def] + """Factory to create a TestServer instance, given an app. 
+ + aiohttp_server(app, **kwargs) + """ + servers = [] + + async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def] + server = TestServer(app, port=port) + await server.start_server(loop=loop, **kwargs) + servers.append(server) + return server + + yield go + + async def finalize() -> None: + while servers: + await servers.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover + warnings.warn( + "Deprecated, use aiohttp_server fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_server + + +@pytest.fixture +def aiohttp_raw_server(loop): # type: ignore[no-untyped-def] + """Factory to create a RawTestServer instance, given a web handler. + + aiohttp_raw_server(handler, **kwargs) + """ + servers = [] + + async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def] + server = RawTestServer(handler, port=port) + await server.start_server(loop=loop, **kwargs) + servers.append(server) + return server + + yield go + + async def finalize() -> None: + while servers: + await servers.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover + aiohttp_raw_server, +): + warnings.warn( + "Deprecated, use aiohttp_raw_server fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_raw_server + + +@pytest.fixture +def aiohttp_client( + loop: asyncio.AbstractEventLoop, +) -> Generator[AiohttpClient, None, None]: + """Factory to create a TestClient instance. + + aiohttp_client(app, **kwargs) + aiohttp_client(server, **kwargs) + aiohttp_client(raw_server, **kwargs) + """ + clients = [] + + async def go( + __param: Union[Application, BaseTestServer], + *args: Any, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any + ) -> TestClient: + + if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] + __param, (Application, BaseTestServer) + ): + __param = __param(loop, *args, **kwargs) + kwargs = {} + else: + assert not args, "args should be empty" + + if isinstance(__param, Application): + server_kwargs = server_kwargs or {} + server = TestServer(__param, loop=loop, **server_kwargs) + client = TestClient(server, loop=loop, **kwargs) + elif isinstance(__param, BaseTestServer): + client = TestClient(__param, loop=loop, **kwargs) + else: + raise ValueError("Unknown argument type: %r" % type(__param)) + + await client.start_server() + clients.append(client) + return client + + yield go + + async def finalize() -> None: + while clients: + await clients.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover + warnings.warn( + "Deprecated, use aiohttp_client fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_client diff --git a/venv/lib/python3.10/site-packages/aiohttp/resolver.py b/venv/lib/python3.10/site-packages/aiohttp/resolver.py new file mode 100644 index 0000000..531ce93 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/resolver.py @@ -0,0 +1,160 @@ +import asyncio +import socket +from typing import Any, Dict, List, Optional, Type, Union + +from .abc import AbstractResolver +from .helpers import get_running_loop + +__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver") + +try: + import aiodns + + # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname') 
+except ImportError: # pragma: no cover + aiodns = None + +aiodns_default = False + + +class ThreadedResolver(AbstractResolver): + """Threaded resolver. + + Uses an Executor for synchronous getaddrinfo() calls. + concurrent.futures.ThreadPoolExecutor is used by default. + """ + + def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + self._loop = get_running_loop(loop) + + async def resolve( + self, hostname: str, port: int = 0, family: int = socket.AF_INET + ) -> List[Dict[str, Any]]: + infos = await self._loop.getaddrinfo( + hostname, + port, + type=socket.SOCK_STREAM, + family=family, + flags=socket.AI_ADDRCONFIG, + ) + + hosts = [] + for family, _, proto, _, address in infos: + if family == socket.AF_INET6: + if len(address) < 3: + # IPv6 is not supported by Python build, + # or IPv6 is not enabled in the host + continue + if address[3]: # type: ignore[misc] + # This is essential for link-local IPv6 addresses. + # LL IPv6 is a VERY rare case. Strictly speaking, we should use + # getnameinfo() unconditionally, but performance makes sense. + host, _port = socket.getnameinfo( + address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV + ) + port = int(_port) + else: + host, port = address[:2] + else: # IPv4 + assert family == socket.AF_INET + host, port = address # type: ignore[misc] + hosts.append( + { + "hostname": hostname, + "host": host, + "port": port, + "family": family, + "proto": proto, + "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, + } + ) + + return hosts + + async def close(self) -> None: + pass + + +class AsyncResolver(AbstractResolver): + """Use the `aiodns` package to make asynchronous DNS lookups""" + + def __init__( + self, + loop: Optional[asyncio.AbstractEventLoop] = None, + *args: Any, + **kwargs: Any + ) -> None: + if aiodns is None: + raise RuntimeError("Resolver requires aiodns library") + + self._loop = get_running_loop(loop) + self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs) + + if not hasattr(self._resolver, "gethostbyname"): + # aiodns 1.1 is not available, fallback to DNSResolver.query + self.resolve = self._resolve_with_query # type: ignore + + async def resolve( + self, host: str, port: int = 0, family: int = socket.AF_INET + ) -> List[Dict[str, Any]]: + try: + resp = await self._resolver.gethostbyname(host, family) + except aiodns.error.DNSError as exc: + msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" + raise OSError(msg) from exc + hosts = [] + for address in resp.addresses: + hosts.append( + { + "hostname": host, + "host": address, + "port": port, + "family": family, + "proto": 0, + "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, + } + ) + + if not hosts: + raise OSError("DNS lookup failed") + + return hosts + + async def _resolve_with_query( + self, host: str, port: int = 0, family: int = socket.AF_INET + ) -> List[Dict[str, Any]]: + if family == socket.AF_INET6: + qtype = "AAAA" + else: + qtype = "A" + + try: + resp = await self._resolver.query(host, qtype) + except aiodns.error.DNSError as exc: + msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" + raise OSError(msg) from exc + + hosts = [] + for rr in resp: + hosts.append( + { + "hostname": host, + "host": rr.host, + "port": port, + "family": family, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + ) + + if not hosts: + raise OSError("DNS lookup failed") + + return hosts + + async def close(self) -> None: + self._resolver.cancel() + + +_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]] 
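A small usage sketch for the resolvers above; example.com is a placeholder host. AsyncResolver works the same way but requires the aiodns package, and either resolver can also be handed to aiohttp.TCPConnector(resolver=...):

import asyncio
import socket

from aiohttp.resolver import ThreadedResolver


async def main() -> None:
    resolver = ThreadedResolver()  # picks up the running event loop
    hosts = await resolver.resolve("example.com", 443, family=socket.AF_INET)
    for info in hosts:
        # each entry is a dict with hostname/host/port/family/proto/flags keys
        print(info["host"], info["port"], info["family"])
    await resolver.close()


asyncio.run(main())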
+DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver diff --git a/venv/lib/python3.10/site-packages/aiohttp/streams.py b/venv/lib/python3.10/site-packages/aiohttp/streams.py new file mode 100644 index 0000000..726b023 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/streams.py @@ -0,0 +1,660 @@ +import asyncio +import collections +import warnings +from typing import Awaitable, Callable, Deque, Generic, List, Optional, Tuple, TypeVar + +from .base_protocol import BaseProtocol +from .helpers import BaseTimerContext, set_exception, set_result +from .log import internal_logger +from .typedefs import Final + +__all__ = ( + "EMPTY_PAYLOAD", + "EofStream", + "StreamReader", + "DataQueue", + "FlowControlDataQueue", +) + +_T = TypeVar("_T") + + +class EofStream(Exception): + """eof stream indication.""" + + +class AsyncStreamIterator(Generic[_T]): + def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None: + self.read_func = read_func + + def __aiter__(self) -> "AsyncStreamIterator[_T]": + return self + + async def __anext__(self) -> _T: + try: + rv = await self.read_func() + except EofStream: + raise StopAsyncIteration + if rv == b"": + raise StopAsyncIteration + return rv + + +class ChunkTupleAsyncStreamIterator: + def __init__(self, stream: "StreamReader") -> None: + self._stream = stream + + def __aiter__(self) -> "ChunkTupleAsyncStreamIterator": + return self + + async def __anext__(self) -> Tuple[bytes, bool]: + rv = await self._stream.readchunk() + if rv == (b"", False): + raise StopAsyncIteration + return rv + + +class AsyncStreamReaderMixin: + def __aiter__(self) -> AsyncStreamIterator[bytes]: + return AsyncStreamIterator(self.readline) # type: ignore[attr-defined] + + def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: + """Returns an asynchronous iterator that yields chunks of size n. + + Python-3.5 available for Python 3.5+ only + """ + return AsyncStreamIterator( + lambda: self.read(n) # type: ignore[attr-defined,no-any-return] + ) + + def iter_any(self) -> AsyncStreamIterator[bytes]: + """Yield all available data as soon as it is received. + + Python-3.5 available for Python 3.5+ only + """ + return AsyncStreamIterator(self.readany) # type: ignore[attr-defined] + + def iter_chunks(self) -> ChunkTupleAsyncStreamIterator: + """Yield chunks of data as they are received by the server. + + The yielded objects are tuples + of (bytes, bool) as returned by the StreamReader.readchunk method. + + Python-3.5 available for Python 3.5+ only + """ + return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type] + + +class StreamReader(AsyncStreamReaderMixin): + """An enhancement of asyncio.StreamReader. + + Supports asynchronous iteration by line, chunk or as available:: + + async for line in reader: + ... + async for chunk in reader.iter_chunked(1024): + ... + async for slice in reader.iter_any(): + ... 
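The iteration API sketched in this docstring is most commonly reached through a client response's content attribute, which is a StreamReader. A hedged sketch; the URL is a placeholder:

import asyncio

import aiohttp


async def download() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.get("http://httpbin.org/bytes/65536") as resp:
            # resp.content is a StreamReader; read it in fixed-size chunks
            async for chunk in resp.content.iter_chunked(8192):
                print("got", len(chunk), "bytes")


asyncio.run(download())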
+ + """ + + total_bytes = 0 + + def __init__( + self, + protocol: BaseProtocol, + limit: int, + *, + timer: Optional[BaseTimerContext] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + self._protocol = protocol + self._low_water = limit + self._high_water = limit * 2 + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + self._size = 0 + self._cursor = 0 + self._http_chunk_splits: Optional[List[int]] = None + self._buffer: Deque[bytes] = collections.deque() + self._buffer_offset = 0 + self._eof = False + self._waiter: Optional[asyncio.Future[None]] = None + self._eof_waiter: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._timer = timer + self._eof_callbacks: List[Callable[[], None]] = [] + + def __repr__(self) -> str: + info = [self.__class__.__name__] + if self._size: + info.append("%d bytes" % self._size) + if self._eof: + info.append("eof") + if self._low_water != 2**16: # default limit + info.append("low=%d high=%d" % (self._low_water, self._high_water)) + if self._waiter: + info.append("w=%r" % self._waiter) + if self._exception: + info.append("e=%r" % self._exception) + return "<%s>" % " ".join(info) + + def get_read_buffer_limits(self) -> Tuple[int, int]: + return (self._low_water, self._high_water) + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception(self, exc: BaseException) -> None: + self._exception = exc + self._eof_callbacks.clear() + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_exception(waiter, exc) + + waiter = self._eof_waiter + if waiter is not None: + self._eof_waiter = None + set_exception(waiter, exc) + + def on_eof(self, callback: Callable[[], None]) -> None: + if self._eof: + try: + callback() + except Exception: + internal_logger.exception("Exception in eof callback") + else: + self._eof_callbacks.append(callback) + + def feed_eof(self) -> None: + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + waiter = self._eof_waiter + if waiter is not None: + self._eof_waiter = None + set_result(waiter, None) + + for cb in self._eof_callbacks: + try: + cb() + except Exception: + internal_logger.exception("Exception in eof callback") + + self._eof_callbacks.clear() + + def is_eof(self) -> bool: + """Return True if 'feed_eof' was called.""" + return self._eof + + def at_eof(self) -> bool: + """Return True if the buffer is empty and 'feed_eof' was called.""" + return self._eof and not self._buffer + + async def wait_eof(self) -> None: + if self._eof: + return + + assert self._eof_waiter is None + self._eof_waiter = self._loop.create_future() + try: + await self._eof_waiter + finally: + self._eof_waiter = None + + def unread_data(self, data: bytes) -> None: + """rollback reading some data from stream, inserting it to buffer head.""" + warnings.warn( + "unread_data() is deprecated " + "and will be removed in future releases (#3260)", + DeprecationWarning, + stacklevel=2, + ) + if not data: + return + + if self._buffer_offset: + self._buffer[0] = self._buffer[0][self._buffer_offset :] + self._buffer_offset = 0 + self._size += len(data) + self._cursor -= len(data) + self._buffer.appendleft(data) + self._eof_counter = 0 + + # TODO: size is ignored, remove the param later + def feed_data(self, data: bytes, size: int = 0) -> None: + assert not self._eof, "feed_data after feed_eof" + + if not data: + return + + self._size += len(data) + 
self._buffer.append(data)
+        self.total_bytes += len(data)
+
+        waiter = self._waiter
+        if waiter is not None:
+            self._waiter = None
+            set_result(waiter, None)
+
+        if self._size > self._high_water and not self._protocol._reading_paused:
+            self._protocol.pause_reading()
+
+    def begin_http_chunk_receiving(self) -> None:
+        if self._http_chunk_splits is None:
+            if self.total_bytes:
+                raise RuntimeError(
+                    "Called begin_http_chunk_receiving when "
+                    "some data was already fed"
+                )
+            self._http_chunk_splits = []
+
+    def end_http_chunk_receiving(self) -> None:
+        if self._http_chunk_splits is None:
+            raise RuntimeError(
+                "Called end_chunk_receiving without calling "
+                "begin_chunk_receiving first"
+            )
+
+        # self._http_chunk_splits contains logical byte offsets from start of
+        # the body transfer. Each offset is the offset of the end of a chunk.
+        # "Logical" means bytes, accessible for a user.
+        # If no chunks containing logical data were received, current position
+        # is definitely zero.
+        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0
+
+        if self.total_bytes == pos:
+            # We should not add empty chunks here. So we check for that.
+            # Note, when chunked + gzip is used, we can receive a chunk
+            # of compressed data, but that data may not be enough for gzip FSM
+            # to yield any uncompressed data. That's why current position may
+            # not change after receiving a chunk.
+            return
+
+        self._http_chunk_splits.append(self.total_bytes)
+
+        # wake up readchunk when end of http chunk received
+        waiter = self._waiter
+        if waiter is not None:
+            self._waiter = None
+            set_result(waiter, None)
+
+    async def _wait(self, func_name: str) -> None:
+        # StreamReader uses a future to link the protocol feed_data() method
+        # to a read coroutine. Running two read coroutines at the same time
+        # would have unexpected behaviour. It would not be possible to know
+        # which coroutine would get the next data.
+        if self._waiter is not None:
+            raise RuntimeError(
+                "%s() called while another coroutine is "
+                "already waiting for incoming data" % func_name
+            )
+
+        waiter = self._waiter = self._loop.create_future()
+        try:
+            if self._timer:
+                with self._timer:
+                    await waiter
+            else:
+                await waiter
+        finally:
+            self._waiter = None
+
+    async def readline(self) -> bytes:
+        return await self.readuntil()
+
+    async def readuntil(self, separator: bytes = b"\n") -> bytes:
+        seplen = len(separator)
+        if seplen == 0:
+            raise ValueError("Separator should be at least a one-byte string")
+
+        if self._exception is not None:
+            raise self._exception
+
+        chunk = b""
+        chunk_size = 0
+        not_enough = True
+
+        while not_enough:
+            while self._buffer and not_enough:
+                offset = self._buffer_offset
+                ichar = self._buffer[0].find(separator, offset) + 1
+                # Read from current offset to found separator or to the end.
+                data = self._read_nowait_chunk(ichar - offset if ichar else -1)
+                chunk += data
+                chunk_size += len(data)
+                if ichar:
+                    not_enough = False
+
+                if chunk_size > self._high_water:
+                    raise ValueError("Chunk too big")
+
+            if self._eof:
+                break
+
+            if not_enough:
+                await self._wait("readuntil")
+
+        return chunk
+
+    async def read(self, n: int = -1) -> bytes:
+        if self._exception is not None:
+            raise self._exception
+
+        # migration problem; with DataQueue you have to catch
+        # EofStream exception, so the common pattern is to run payload.read()
+        # inside an infinite loop, which can become a real infinite loop with
+        # StreamReader. Let's keep this code for one more major release.
+        if __debug__:
+            if self._eof and not self._buffer:
+                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
+                if self._eof_counter > 5:
+                    internal_logger.warning(
+                        "Multiple access to StreamReader in eof state, "
+                        "might be infinite loop.",
+                        stack_info=True,
+                    )
+
+        if not n:
+            return b""
+
+        if n < 0:
+            # This used to just loop creating a new waiter hoping to
+            # collect everything in self._buffer, but that would
+            # deadlock if the subprocess sends more than self.limit
+            # bytes. So just call self.readany() until EOF.
+            blocks = []
+            while True:
+                block = await self.readany()
+                if not block:
+                    break
+                blocks.append(block)
+            return b"".join(blocks)
+
+        # TODO: should be `if` instead of `while`
+        # because the waiter may be triggered on chunk end,
+        # without feeding any data
+        while not self._buffer and not self._eof:
+            await self._wait("read")
+
+        return self._read_nowait(n)
+
+    async def readany(self) -> bytes:
+        if self._exception is not None:
+            raise self._exception
+
+        # TODO: should be `if` instead of `while`
+        # because the waiter may be triggered on chunk end,
+        # without feeding any data
+        while not self._buffer and not self._eof:
+            await self._wait("readany")
+
+        return self._read_nowait(-1)
+
+    async def readchunk(self) -> Tuple[bytes, bool]:
+        """Returns a tuple of (data, end_of_http_chunk).
+
+        When chunked transfer encoding is used, end_of_http_chunk is a
+        boolean indicating if the end of the data corresponds to the end
+        of an HTTP chunk, otherwise it is always False.
+        """
+        while True:
+            if self._exception is not None:
+                raise self._exception
+
+            while self._http_chunk_splits:
+                pos = self._http_chunk_splits.pop(0)
+                if pos == self._cursor:
+                    return (b"", True)
+                if pos > self._cursor:
+                    return (self._read_nowait(pos - self._cursor), True)
+                internal_logger.warning(
+                    "Skipping HTTP chunk end due to data "
+                    "consumption beyond chunk boundary"
+                )
+
+            if self._buffer:
+                return (self._read_nowait_chunk(-1), False)
+                # return (self._read_nowait(-1), False)
+
+            if self._eof:
+                # Special case for signifying EOF.
+                # (b'', True) is not a final return value actually.
+                return (b"", False)
+
+            await self._wait("readchunk")
+
+    async def readexactly(self, n: int) -> bytes:
+        if self._exception is not None:
+            raise self._exception
+
+        blocks: List[bytes] = []
+        while n > 0:
+            block = await self.read(n)
+            if not block:
+                partial = b"".join(blocks)
+                raise asyncio.IncompleteReadError(partial, len(partial) + n)
+            blocks.append(block)
+            n -= len(block)
+
+        return b"".join(blocks)
+
+    def read_nowait(self, n: int = -1) -> bytes:
+        # default was changed to be consistent with .read(-1)
+        #
+        # most users likely don't know about the method and
+        # they are not affected.
+        if self._exception is not None:
+            raise self._exception
+
+        if self._waiter and not self._waiter.done():
+            raise RuntimeError(
+                "Called while some coroutine is waiting for incoming data."
+ ) + + return self._read_nowait(n) + + def _read_nowait_chunk(self, n: int) -> bytes: + first_buffer = self._buffer[0] + offset = self._buffer_offset + if n != -1 and len(first_buffer) - offset > n: + data = first_buffer[offset : offset + n] + self._buffer_offset += n + + elif offset: + self._buffer.popleft() + data = first_buffer[offset:] + self._buffer_offset = 0 + + else: + data = self._buffer.popleft() + + self._size -= len(data) + self._cursor += len(data) + + chunk_splits = self._http_chunk_splits + # Prevent memory leak: drop useless chunk splits + while chunk_splits and chunk_splits[0] < self._cursor: + chunk_splits.pop(0) + + if self._size < self._low_water and self._protocol._reading_paused: + self._protocol.resume_reading() + return data + + def _read_nowait(self, n: int) -> bytes: + """Read not more than n bytes, or whole buffer if n == -1""" + chunks = [] + + while self._buffer: + chunk = self._read_nowait_chunk(n) + chunks.append(chunk) + if n != -1: + n -= len(chunk) + if n == 0: + break + + return b"".join(chunks) if chunks else b"" + + +class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init] + def __init__(self) -> None: + pass + + def exception(self) -> Optional[BaseException]: + return None + + def set_exception(self, exc: BaseException) -> None: + pass + + def on_eof(self, callback: Callable[[], None]) -> None: + try: + callback() + except Exception: + internal_logger.exception("Exception in eof callback") + + def feed_eof(self) -> None: + pass + + def is_eof(self) -> bool: + return True + + def at_eof(self) -> bool: + return True + + async def wait_eof(self) -> None: + return + + def feed_data(self, data: bytes, n: int = 0) -> None: + pass + + async def readline(self) -> bytes: + return b"" + + async def read(self, n: int = -1) -> bytes: + return b"" + + # TODO add async def readuntil + + async def readany(self) -> bytes: + return b"" + + async def readchunk(self) -> Tuple[bytes, bool]: + return (b"", True) + + async def readexactly(self, n: int) -> bytes: + raise asyncio.IncompleteReadError(b"", n) + + def read_nowait(self, n: int = -1) -> bytes: + return b"" + + +EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader() + + +class DataQueue(Generic[_T]): + """DataQueue is a general-purpose blocking queue with one reader.""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._eof = False + self._waiter: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._size = 0 + self._buffer: Deque[Tuple[_T, int]] = collections.deque() + + def __len__(self) -> int: + return len(self._buffer) + + def is_eof(self) -> bool: + return self._eof + + def at_eof(self) -> bool: + return self._eof and not self._buffer + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception(self, exc: BaseException) -> None: + self._eof = True + self._exception = exc + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_exception(waiter, exc) + + def feed_data(self, data: _T, size: int = 0) -> None: + self._size += size + self._buffer.append((data, size)) + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + def feed_eof(self) -> None: + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + async def read(self) -> _T: + if not self._buffer and not self._eof: + assert not self._waiter + self._waiter = self._loop.create_future() + 
try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + + if self._buffer: + data, size = self._buffer.popleft() + self._size -= size + return data + else: + if self._exception is not None: + raise self._exception + else: + raise EofStream + + def __aiter__(self) -> AsyncStreamIterator[_T]: + return AsyncStreamIterator(self.read) + + +class FlowControlDataQueue(DataQueue[_T]): + """FlowControlDataQueue resumes and pauses an underlying stream. + + It is a destination for parsed data. + """ + + def __init__( + self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop + ) -> None: + super().__init__(loop=loop) + + self._protocol = protocol + self._limit = limit * 2 + + def feed_data(self, data: _T, size: int = 0) -> None: + super().feed_data(data, size) + + if self._size > self._limit and not self._protocol._reading_paused: + self._protocol.pause_reading() + + async def read(self) -> _T: + try: + return await super().read() + finally: + if self._size < self._limit and self._protocol._reading_paused: + self._protocol.resume_reading() diff --git a/venv/lib/python3.10/site-packages/aiohttp/tcp_helpers.py b/venv/lib/python3.10/site-packages/aiohttp/tcp_helpers.py new file mode 100644 index 0000000..88b2442 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/tcp_helpers.py @@ -0,0 +1,37 @@ +"""Helper methods to tune a TCP connection""" + +import asyncio +import socket +from contextlib import suppress +from typing import Optional # noqa + +__all__ = ("tcp_keepalive", "tcp_nodelay") + + +if hasattr(socket, "SO_KEEPALIVE"): + + def tcp_keepalive(transport: asyncio.Transport) -> None: + sock = transport.get_extra_info("socket") + if sock is not None: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + +else: + + def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover + pass + + +def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None: + sock = transport.get_extra_info("socket") + + if sock is None: + return + + if sock.family not in (socket.AF_INET, socket.AF_INET6): + return + + value = bool(value) + + # socket may be closed already, on windows OSError get raised + with suppress(OSError): + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value) diff --git a/venv/lib/python3.10/site-packages/aiohttp/test_utils.py b/venv/lib/python3.10/site-packages/aiohttp/test_utils.py new file mode 100644 index 0000000..fcda2f3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/test_utils.py @@ -0,0 +1,706 @@ +"""Utilities shared by tests.""" + +import asyncio +import contextlib +import gc +import inspect +import ipaddress +import os +import socket +import sys +import warnings +from abc import ABC, abstractmethod +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterator, + List, + Optional, + Type, + Union, + cast, +) +from unittest import mock + +from aiosignal import Signal +from multidict import CIMultiDict, CIMultiDictProxy +from yarl import URL + +import aiohttp +from aiohttp.client import _RequestContextManager, _WSRequestContextManager + +from . 
import ClientSession, hdrs +from .abc import AbstractCookieJar +from .client_reqrep import ClientResponse +from .client_ws import ClientWebSocketResponse +from .helpers import PY_38, sentinel +from .http import HttpVersion, RawRequestMessage +from .web import ( + Application, + AppRunner, + BaseRunner, + Request, + Server, + ServerRunner, + SockSite, + UrlMappingMatchInfo, +) +from .web_protocol import _RequestHandler + +if TYPE_CHECKING: # pragma: no cover + from ssl import SSLContext +else: + SSLContext = None + +if PY_38: + from unittest import IsolatedAsyncioTestCase as TestCase +else: + from asynctest import TestCase # type: ignore[no-redef] + +REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" + + +def get_unused_port_socket( + host: str, family: socket.AddressFamily = socket.AF_INET +) -> socket.socket: + return get_port_socket(host, 0, family) + + +def get_port_socket( + host: str, port: int, family: socket.AddressFamily +) -> socket.socket: + s = socket.socket(family, socket.SOCK_STREAM) + if REUSE_ADDRESS: + # Windows has different semantics for SO_REUSEADDR, + # so don't set it. Ref: + # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + s.bind((host, port)) + return s + + +def unused_port() -> int: + """Return a port that is unused on the current host.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(("127.0.0.1", 0)) + return cast(int, s.getsockname()[1]) + + +class BaseTestServer(ABC): + __test__ = False + + def __init__( + self, + *, + scheme: Union[str, object] = sentinel, + loop: Optional[asyncio.AbstractEventLoop] = None, + host: str = "127.0.0.1", + port: Optional[int] = None, + skip_url_asserts: bool = False, + socket_factory: Callable[ + [str, int, socket.AddressFamily], socket.socket + ] = get_port_socket, + **kwargs: Any, + ) -> None: + self._loop = loop + self.runner: Optional[BaseRunner] = None + self._root: Optional[URL] = None + self.host = host + self.port = port + self._closed = False + self.scheme = scheme + self.skip_url_asserts = skip_url_asserts + self.socket_factory = socket_factory + + async def start_server( + self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any + ) -> None: + if self.runner: + return + self._loop = loop + self._ssl = kwargs.pop("ssl", None) + self.runner = await self._make_runner(**kwargs) + await self.runner.setup() + if not self.port: + self.port = 0 + try: + version = ipaddress.ip_address(self.host).version + except ValueError: + version = 4 + family = socket.AF_INET6 if version == 6 else socket.AF_INET + _sock = self.socket_factory(self.host, self.port, family) + self.host, self.port = _sock.getsockname()[:2] + site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl) + await site.start() + server = site._server + assert server is not None + sockets = server.sockets + assert sockets is not None + self.port = sockets[0].getsockname()[1] + if self.scheme is sentinel: + if self._ssl: + scheme = "https" + else: + scheme = "http" + self.scheme = scheme + self._root = URL(f"{self.scheme}://{self.host}:{self.port}") + + @abstractmethod # pragma: no cover + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + pass + + def make_url(self, path: str) -> URL: + assert self._root is not None + url = URL(path) + if not self.skip_url_asserts: + assert not url.is_absolute() + return self._root.join(url) + else: + return URL(str(self._root) + path) + + @property + def 
started(self) -> bool: + return self.runner is not None + + @property + def closed(self) -> bool: + return self._closed + + @property + def handler(self) -> Server: + # for backward compatibility + # web.Server instance + runner = self.runner + assert runner is not None + assert runner.server is not None + return runner.server + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run when the object is garbage collected, and on + exit when used as a context manager. + + """ + if self.started and not self.closed: + assert self.runner is not None + await self.runner.cleanup() + self._root = None + self.port = None + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "BaseTestServer": + await self.start_server(loop=self._loop) + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + await self.close() + + +class TestServer(BaseTestServer): + def __init__( + self, + app: Application, + *, + scheme: Union[str, object] = sentinel, + host: str = "127.0.0.1", + port: Optional[int] = None, + **kwargs: Any, + ): + self.app = app + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + return AppRunner(self.app, **kwargs) + + +class RawTestServer(BaseTestServer): + def __init__( + self, + handler: _RequestHandler, + *, + scheme: Union[str, object] = sentinel, + host: str = "127.0.0.1", + port: Optional[int] = None, + **kwargs: Any, + ) -> None: + self._handler = handler + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner: + srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs) + return ServerRunner(srv, debug=debug, **kwargs) + + +class TestClient: + """ + A test client implementation. + + To write functional tests for aiohttp based servers. 
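A standalone sketch (no pytest) that drives TestClient as an async context manager, assuming it runs inside an event loop; the handler is invented for the example:

import asyncio

from aiohttp import web
from aiohttp.test_utils import TestClient, TestServer


async def handler(request: web.Request) -> web.Response:
    return web.Response(text="ok")


async def main() -> None:
    app = web.Application()
    app.router.add_get("/", handler)
    # TestClient starts the wrapped TestServer on __aenter__ and
    # closes both on __aexit__
    async with TestClient(TestServer(app)) as client:
        resp = await client.get("/")
        assert await resp.text() == "ok"


asyncio.run(main())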
+ + """ + + __test__ = False + + def __init__( + self, + server: BaseTestServer, + *, + cookie_jar: Optional[AbstractCookieJar] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any, + ) -> None: + if not isinstance(server, BaseTestServer): + raise TypeError( + "server must be TestServer " "instance, found type: %r" % type(server) + ) + self._server = server + self._loop = loop + if cookie_jar is None: + cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop) + self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs) + self._closed = False + self._responses: List[ClientResponse] = [] + self._websockets: List[ClientWebSocketResponse] = [] + + async def start_server(self) -> None: + await self._server.start_server(loop=self._loop) + + @property + def host(self) -> str: + return self._server.host + + @property + def port(self) -> Optional[int]: + return self._server.port + + @property + def server(self) -> BaseTestServer: + return self._server + + @property + def app(self) -> Optional[Application]: + return cast(Optional[Application], getattr(self._server, "app", None)) + + @property + def session(self) -> ClientSession: + """An internal aiohttp.ClientSession. + + Unlike the methods on the TestClient, client session requests + do not automatically include the host in the url queried, and + will require an absolute path to the resource. + + """ + return self._session + + def make_url(self, path: str) -> URL: + return self._server.make_url(path) + + async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse: + resp = await self._session.request(method, self.make_url(path), **kwargs) + # save it to close later + self._responses.append(resp) + return resp + + def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager: + """Routes a request to tested http server. + + The interface is identical to aiohttp.ClientSession.request, + except the loop kwarg is overridden by the instance used by the + test server. + + """ + return _RequestContextManager(self._request(method, path, **kwargs)) + + def get(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP GET request.""" + return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) + + def post(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP POST request.""" + return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) + + def options(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP OPTIONS request.""" + return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs)) + + def head(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP HEAD request.""" + return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) + + def put(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PUT request.""" + return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs)) + + def patch(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs)) + + def delete(self, path: str, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs)) + + def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager: + """Initiate websocket connection. 
+ + The api corresponds to aiohttp.ClientSession.ws_connect. + + """ + return _WSRequestContextManager(self._ws_connect(path, **kwargs)) + + async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse: + ws = await self._session.ws_connect(self.make_url(path), **kwargs) + self._websockets.append(ws) + return ws + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run on exit when used as a(n) (asynchronous) + context manager. + + """ + if not self._closed: + for resp in self._responses: + resp.close() + for ws in self._websockets: + await ws.close() + await self._session.close() + await self._server.close() + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "TestClient": + await self.start_server() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self.close() + + +class AioHTTPTestCase(TestCase): + """A base class to allow for unittest web applications using aiohttp. + + Provides the following: + + * self.client (aiohttp.test_utils.TestClient): an aiohttp test client. + * self.loop (asyncio.BaseEventLoop): the event loop in which the + application and server are running. + * self.app (aiohttp.web.Application): the application returned by + self.get_application() + + Note that the TestClient's methods are asynchronous: you have to + execute function on the test client using asynchronous methods. + """ + + async def get_application(self) -> Application: + """Get application. + + This method should be overridden + to return the aiohttp.web.Application + object to test. + """ + return self.get_app() + + def get_app(self) -> Application: + """Obsolete method used to constructing web application. + + Use .get_application() coroutine instead. 
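A sketch of the unittest-style usage described above; the handler and assertions are invented. On Python 3.8+ async test methods run directly through the IsolatedAsyncioTestCase base imported at the top of this file, so no extra decorator is needed:

from aiohttp import web
from aiohttp.test_utils import AioHTTPTestCase


class HelloTestCase(AioHTTPTestCase):
    async def get_application(self) -> web.Application:
        async def hello(request: web.Request) -> web.Response:
            return web.Response(text="hello")

        app = web.Application()
        app.router.add_get("/", hello)
        return app

    async def test_hello(self) -> None:
        # self.client is set up in setUpAsync above
        resp = await self.client.get("/")
        assert resp.status == 200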
+ """ + raise RuntimeError("Did you forget to define get_application()?") + + def setUp(self) -> None: + if not PY_38: + asyncio.get_event_loop().run_until_complete(self.asyncSetUp()) + + async def asyncSetUp(self) -> None: + try: + self.loop = asyncio.get_running_loop() + except (AttributeError, RuntimeError): # AttributeError->py36 + self.loop = asyncio.get_event_loop_policy().get_event_loop() + + return await self.setUpAsync() + + async def setUpAsync(self) -> None: + self.app = await self.get_application() + self.server = await self.get_server(self.app) + self.client = await self.get_client(self.server) + + await self.client.start_server() + + def tearDown(self) -> None: + if not PY_38: + self.loop.run_until_complete(self.asyncTearDown()) + + async def asyncTearDown(self) -> None: + return await self.tearDownAsync() + + async def tearDownAsync(self) -> None: + await self.client.close() + + async def get_server(self, app: Application) -> TestServer: + """Return a TestServer instance.""" + return TestServer(app, loop=self.loop) + + async def get_client(self, server: TestServer) -> TestClient: + """Return a TestClient instance.""" + return TestClient(server, loop=self.loop) + + +def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any: + """ + A decorator dedicated to use with asynchronous AioHTTPTestCase test methods. + + In 3.8+, this does nothing. + """ + warnings.warn( + "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+", + DeprecationWarning, + stacklevel=2, + ) + return func + + +_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop] + + +@contextlib.contextmanager +def loop_context( + loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False +) -> Iterator[asyncio.AbstractEventLoop]: + """A contextmanager that creates an event_loop, for test purposes. + + Handles the creation and cleanup of a test loop. + """ + loop = setup_test_loop(loop_factory) + yield loop + teardown_test_loop(loop, fast=fast) + + +def setup_test_loop( + loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, +) -> asyncio.AbstractEventLoop: + """Create and return an asyncio.BaseEventLoop instance. + + The caller should also call teardown_test_loop, + once they are done with the loop. 
+ """ + loop = loop_factory() + try: + module = loop.__class__.__module__ + skip_watcher = "uvloop" in module + except AttributeError: # pragma: no cover + # Just in case + skip_watcher = True + asyncio.set_event_loop(loop) + if sys.platform != "win32" and not skip_watcher: + policy = asyncio.get_event_loop_policy() + watcher: asyncio.AbstractChildWatcher + try: # Python >= 3.8 + # Refs: + # * https://github.com/pytest-dev/pytest-xdist/issues/620 + # * https://stackoverflow.com/a/58614689/595220 + # * https://bugs.python.org/issue35621 + # * https://github.com/python/cpython/pull/14344 + watcher = asyncio.ThreadedChildWatcher() + except AttributeError: # Python < 3.8 + watcher = asyncio.SafeChildWatcher() + watcher.attach_loop(loop) + with contextlib.suppress(NotImplementedError): + policy.set_child_watcher(watcher) + return loop + + +def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None: + """Teardown and cleanup an event_loop created by setup_test_loop.""" + closed = loop.is_closed() + if not closed: + loop.call_soon(loop.stop) + loop.run_forever() + loop.close() + + if not fast: + gc.collect() + + asyncio.set_event_loop(None) + + +def _create_app_mock() -> mock.MagicMock: + def get_dict(app: Any, key: str) -> Any: + return app.__app_dict[key] + + def set_dict(app: Any, key: str, value: Any) -> None: + app.__app_dict[key] = value + + app = mock.MagicMock(spec=Application) + app.__app_dict = {} + app.__getitem__ = get_dict + app.__setitem__ = set_dict + + app._debug = False + app.on_response_prepare = Signal(app) + app.on_response_prepare.freeze() + return app + + +def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock: + transport = mock.Mock() + + def get_extra_info(key: str) -> Optional[SSLContext]: + if key == "sslcontext": + return sslcontext + else: + return None + + transport.get_extra_info.side_effect = get_extra_info + return transport + + +def make_mocked_request( + method: str, + path: str, + headers: Any = None, + *, + match_info: Any = sentinel, + version: HttpVersion = HttpVersion(1, 1), + closing: bool = False, + app: Any = None, + writer: Any = sentinel, + protocol: Any = sentinel, + transport: Any = sentinel, + payload: Any = sentinel, + sslcontext: Optional[SSLContext] = None, + client_max_size: int = 1024**2, + loop: Any = ..., +) -> Request: + """Creates mocked web.Request testing purposes. + + Useful in unit tests, when spinning full web server is overkill or + specific conditions and errors are hard to trigger. 
+ """ + task = mock.Mock() + if loop is ...: + loop = mock.Mock() + loop.create_future.return_value = () + + if version < HttpVersion(1, 1): + closing = True + + if headers: + headers = CIMultiDictProxy(CIMultiDict(headers)) + raw_hdrs = tuple( + (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() + ) + else: + headers = CIMultiDictProxy(CIMultiDict()) + raw_hdrs = () + + chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower() + + message = RawRequestMessage( + method, + path, + version, + headers, + raw_hdrs, + closing, + None, + False, + chunked, + URL(path), + ) + if app is None: + app = _create_app_mock() + + if transport is sentinel: + transport = _create_transport(sslcontext) + + if protocol is sentinel: + protocol = mock.Mock() + protocol.transport = transport + + if writer is sentinel: + writer = mock.Mock() + writer.write_headers = make_mocked_coro(None) + writer.write = make_mocked_coro(None) + writer.write_eof = make_mocked_coro(None) + writer.drain = make_mocked_coro(None) + writer.transport = transport + + protocol.transport = transport + protocol.writer = writer + + if payload is sentinel: + payload = mock.Mock() + + req = Request( + message, payload, protocol, writer, task, loop, client_max_size=client_max_size + ) + + match_info = UrlMappingMatchInfo( + {} if match_info is sentinel else match_info, mock.Mock() + ) + match_info.add_app(app) + req._match_info = match_info + + return req + + +def make_mocked_coro( + return_value: Any = sentinel, raise_exception: Any = sentinel +) -> Any: + """Creates a coroutine mock.""" + + async def mock_coro(*args: Any, **kwargs: Any) -> Any: + if raise_exception is not sentinel: + raise raise_exception + if not inspect.isawaitable(return_value): + return return_value + await return_value + + return mock.Mock(wraps=mock_coro) diff --git a/venv/lib/python3.10/site-packages/aiohttp/tracing.py b/venv/lib/python3.10/site-packages/aiohttp/tracing.py new file mode 100644 index 0000000..d5596a4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/tracing.py @@ -0,0 +1,472 @@ +from types import SimpleNamespace +from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar + +import attr +from aiosignal import Signal +from multidict import CIMultiDict +from yarl import URL + +from .client_reqrep import ClientResponse + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientSession + from .typedefs import Protocol + + _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True) + + class _SignalCallback(Protocol[_ParamT_contra]): + def __call__( + self, + __client_session: ClientSession, + __trace_config_ctx: SimpleNamespace, + __params: _ParamT_contra, + ) -> Awaitable[None]: + ... 
+ + +__all__ = ( + "TraceConfig", + "TraceRequestStartParams", + "TraceRequestEndParams", + "TraceRequestExceptionParams", + "TraceConnectionQueuedStartParams", + "TraceConnectionQueuedEndParams", + "TraceConnectionCreateStartParams", + "TraceConnectionCreateEndParams", + "TraceConnectionReuseconnParams", + "TraceDnsResolveHostStartParams", + "TraceDnsResolveHostEndParams", + "TraceDnsCacheHitParams", + "TraceDnsCacheMissParams", + "TraceRequestRedirectParams", + "TraceRequestChunkSentParams", + "TraceResponseChunkReceivedParams", + "TraceRequestHeadersSentParams", +) + + +class TraceConfig: + """First-class object used to trace requests launched via ClientSession objects.""" + + def __init__( + self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace + ) -> None: + self._on_request_start: Signal[ + _SignalCallback[TraceRequestStartParams] + ] = Signal(self) + self._on_request_chunk_sent: Signal[ + _SignalCallback[TraceRequestChunkSentParams] + ] = Signal(self) + self._on_response_chunk_received: Signal[ + _SignalCallback[TraceResponseChunkReceivedParams] + ] = Signal(self) + self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal( + self + ) + self._on_request_exception: Signal[ + _SignalCallback[TraceRequestExceptionParams] + ] = Signal(self) + self._on_request_redirect: Signal[ + _SignalCallback[TraceRequestRedirectParams] + ] = Signal(self) + self._on_connection_queued_start: Signal[ + _SignalCallback[TraceConnectionQueuedStartParams] + ] = Signal(self) + self._on_connection_queued_end: Signal[ + _SignalCallback[TraceConnectionQueuedEndParams] + ] = Signal(self) + self._on_connection_create_start: Signal[ + _SignalCallback[TraceConnectionCreateStartParams] + ] = Signal(self) + self._on_connection_create_end: Signal[ + _SignalCallback[TraceConnectionCreateEndParams] + ] = Signal(self) + self._on_connection_reuseconn: Signal[ + _SignalCallback[TraceConnectionReuseconnParams] + ] = Signal(self) + self._on_dns_resolvehost_start: Signal[ + _SignalCallback[TraceDnsResolveHostStartParams] + ] = Signal(self) + self._on_dns_resolvehost_end: Signal[ + _SignalCallback[TraceDnsResolveHostEndParams] + ] = Signal(self) + self._on_dns_cache_hit: Signal[ + _SignalCallback[TraceDnsCacheHitParams] + ] = Signal(self) + self._on_dns_cache_miss: Signal[ + _SignalCallback[TraceDnsCacheMissParams] + ] = Signal(self) + self._on_request_headers_sent: Signal[ + _SignalCallback[TraceRequestHeadersSentParams] + ] = Signal(self) + + self._trace_config_ctx_factory = trace_config_ctx_factory + + def trace_config_ctx( + self, trace_request_ctx: Optional[SimpleNamespace] = None + ) -> SimpleNamespace: + """Return a new trace_config_ctx instance.""" + return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx) + + def freeze(self) -> None: + self._on_request_start.freeze() + self._on_request_chunk_sent.freeze() + self._on_response_chunk_received.freeze() + self._on_request_end.freeze() + self._on_request_exception.freeze() + self._on_request_redirect.freeze() + self._on_connection_queued_start.freeze() + self._on_connection_queued_end.freeze() + self._on_connection_create_start.freeze() + self._on_connection_create_end.freeze() + self._on_connection_reuseconn.freeze() + self._on_dns_resolvehost_start.freeze() + self._on_dns_resolvehost_end.freeze() + self._on_dns_cache_hit.freeze() + self._on_dns_cache_miss.freeze() + self._on_request_headers_sent.freeze() + + @property + def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]": + return
self._on_request_start + + @property + def on_request_chunk_sent( + self, + ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]": + return self._on_request_chunk_sent + + @property + def on_response_chunk_received( + self, + ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]": + return self._on_response_chunk_received + + @property + def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]": + return self._on_request_end + + @property + def on_request_exception( + self, + ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]": + return self._on_request_exception + + @property + def on_request_redirect( + self, + ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]": + return self._on_request_redirect + + @property + def on_connection_queued_start( + self, + ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]": + return self._on_connection_queued_start + + @property + def on_connection_queued_end( + self, + ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]": + return self._on_connection_queued_end + + @property + def on_connection_create_start( + self, + ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]": + return self._on_connection_create_start + + @property + def on_connection_create_end( + self, + ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]": + return self._on_connection_create_end + + @property + def on_connection_reuseconn( + self, + ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]": + return self._on_connection_reuseconn + + @property + def on_dns_resolvehost_start( + self, + ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]": + return self._on_dns_resolvehost_start + + @property + def on_dns_resolvehost_end( + self, + ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]": + return self._on_dns_resolvehost_end + + @property + def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]": + return self._on_dns_cache_hit + + @property + def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]": + return self._on_dns_cache_miss + + @property + def on_request_headers_sent( + self, + ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]": + return self._on_request_headers_sent + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestStartParams: + """Parameters sent by the `on_request_start` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestChunkSentParams: + """Parameters sent by the `on_request_chunk_sent` signal""" + + method: str + url: URL + chunk: bytes + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceResponseChunkReceivedParams: + """Parameters sent by the `on_response_chunk_received` signal""" + + method: str + url: URL + chunk: bytes + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestEndParams: + """Parameters sent by the `on_request_end` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + response: ClientResponse + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestExceptionParams: + """Parameters sent by the `on_request_exception` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + exception: BaseException + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestRedirectParams: + """Parameters sent by the `on_request_redirect` signal""" + + method: str + url: URL + 
headers: "CIMultiDict[str]" + response: ClientResponse + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionQueuedStartParams: + """Parameters sent by the `on_connection_queued_start` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionQueuedEndParams: + """Parameters sent by the `on_connection_queued_end` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionCreateStartParams: + """Parameters sent by the `on_connection_create_start` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionCreateEndParams: + """Parameters sent by the `on_connection_create_end` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionReuseconnParams: + """Parameters sent by the `on_connection_reuseconn` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsResolveHostStartParams: + """Parameters sent by the `on_dns_resolvehost_start` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsResolveHostEndParams: + """Parameters sent by the `on_dns_resolvehost_end` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsCacheHitParams: + """Parameters sent by the `on_dns_cache_hit` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsCacheMissParams: + """Parameters sent by the `on_dns_cache_miss` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestHeadersSentParams: + """Parameters sent by the `on_request_headers_sent` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + + +class Trace: + """Internal dependency holder class. + + Used to keep together the main dependencies used + at the moment of send a signal. 
+ """ + + def __init__( + self, + session: "ClientSession", + trace_config: TraceConfig, + trace_config_ctx: SimpleNamespace, + ) -> None: + self._trace_config = trace_config + self._trace_config_ctx = trace_config_ctx + self._session = session + + async def send_request_start( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + return await self._trace_config.on_request_start.send( + self._session, + self._trace_config_ctx, + TraceRequestStartParams(method, url, headers), + ) + + async def send_request_chunk_sent( + self, method: str, url: URL, chunk: bytes + ) -> None: + return await self._trace_config.on_request_chunk_sent.send( + self._session, + self._trace_config_ctx, + TraceRequestChunkSentParams(method, url, chunk), + ) + + async def send_response_chunk_received( + self, method: str, url: URL, chunk: bytes + ) -> None: + return await self._trace_config.on_response_chunk_received.send( + self._session, + self._trace_config_ctx, + TraceResponseChunkReceivedParams(method, url, chunk), + ) + + async def send_request_end( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + response: ClientResponse, + ) -> None: + return await self._trace_config.on_request_end.send( + self._session, + self._trace_config_ctx, + TraceRequestEndParams(method, url, headers, response), + ) + + async def send_request_exception( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + exception: BaseException, + ) -> None: + return await self._trace_config.on_request_exception.send( + self._session, + self._trace_config_ctx, + TraceRequestExceptionParams(method, url, headers, exception), + ) + + async def send_request_redirect( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + response: ClientResponse, + ) -> None: + return await self._trace_config._on_request_redirect.send( + self._session, + self._trace_config_ctx, + TraceRequestRedirectParams(method, url, headers, response), + ) + + async def send_connection_queued_start(self) -> None: + return await self._trace_config.on_connection_queued_start.send( + self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams() + ) + + async def send_connection_queued_end(self) -> None: + return await self._trace_config.on_connection_queued_end.send( + self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams() + ) + + async def send_connection_create_start(self) -> None: + return await self._trace_config.on_connection_create_start.send( + self._session, self._trace_config_ctx, TraceConnectionCreateStartParams() + ) + + async def send_connection_create_end(self) -> None: + return await self._trace_config.on_connection_create_end.send( + self._session, self._trace_config_ctx, TraceConnectionCreateEndParams() + ) + + async def send_connection_reuseconn(self) -> None: + return await self._trace_config.on_connection_reuseconn.send( + self._session, self._trace_config_ctx, TraceConnectionReuseconnParams() + ) + + async def send_dns_resolvehost_start(self, host: str) -> None: + return await self._trace_config.on_dns_resolvehost_start.send( + self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host) + ) + + async def send_dns_resolvehost_end(self, host: str) -> None: + return await self._trace_config.on_dns_resolvehost_end.send( + self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host) + ) + + async def send_dns_cache_hit(self, host: str) -> None: + return await self._trace_config.on_dns_cache_hit.send( + self._session, self._trace_config_ctx, 
TraceDnsCacheHitParams(host) + ) + + async def send_dns_cache_miss(self, host: str) -> None: + return await self._trace_config.on_dns_cache_miss.send( + self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host) + ) + + async def send_request_headers( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + return await self._trace_config._on_request_headers_sent.send( + self._session, + self._trace_config_ctx, + TraceRequestHeadersSentParams(method, url, headers), + ) diff --git a/venv/lib/python3.10/site-packages/aiohttp/typedefs.py b/venv/lib/python3.10/site-packages/aiohttp/typedefs.py new file mode 100644 index 0000000..84283d9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/typedefs.py @@ -0,0 +1,64 @@ +import json +import os +import sys +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Iterable, + Mapping, + Tuple, + Union, +) + +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr +from yarl import URL + +# These are for other modules to use (to avoid repeating the conditional import). +if sys.version_info >= (3, 8): + from typing import Final as Final, Protocol as Protocol, TypedDict as TypedDict +else: + from typing_extensions import ( # noqa: F401 + Final, + Protocol as Protocol, + TypedDict as TypedDict, + ) + +DEFAULT_JSON_ENCODER = json.dumps +DEFAULT_JSON_DECODER = json.loads + +if TYPE_CHECKING: # pragma: no cover + _CIMultiDict = CIMultiDict[str] + _CIMultiDictProxy = CIMultiDictProxy[str] + _MultiDict = MultiDict[str] + _MultiDictProxy = MultiDictProxy[str] + from http.cookies import BaseCookie, Morsel + + from .web import Request, StreamResponse +else: + _CIMultiDict = CIMultiDict + _CIMultiDictProxy = CIMultiDictProxy + _MultiDict = MultiDict + _MultiDictProxy = MultiDictProxy + +Byteish = Union[bytes, bytearray, memoryview] +JSONEncoder = Callable[[Any], str] +JSONDecoder = Callable[[str], Any] +LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy] +RawHeaders = Tuple[Tuple[bytes, bytes], ...] 
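These aliases document the shapes the public API accepts rather than introducing new runtime types; for example, any Callable[[Any], str] satisfies JSONEncoder and can be passed as a dumps argument, and a plain dict with str keys and values is one valid form of LooseHeaders. A minimal sketch under those assumptions (the handler name is illustrative):

import json
from functools import partial

from aiohttp import web

# Satisfies the JSONEncoder alias: Callable[[Any], str]
compact_dumps = partial(json.dumps, separators=(",", ":"), ensure_ascii=False)

async def handler(request: web.Request) -> web.StreamResponse:
    # A plain Mapping[str, str] is one of the forms LooseHeaders allows
    return web.json_response(
        {"ok": True}, dumps=compact_dumps, headers={"X-Example": "1"}
    )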
+StrOrURL = Union[str, URL] + +LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] +LooseCookiesIterables = Iterable[ + Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] +] +LooseCookies = Union[ + LooseCookiesMappings, + LooseCookiesIterables, + "BaseCookie[str]", +] + +Handler = Callable[["Request"], Awaitable["StreamResponse"]] + +PathLike = Union[str, "os.PathLike[str]"] diff --git a/venv/lib/python3.10/site-packages/aiohttp/web.py b/venv/lib/python3.10/site-packages/aiohttp/web.py new file mode 100644 index 0000000..cefae2b --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web.py @@ -0,0 +1,588 @@ +import asyncio +import logging +import socket +import sys +from argparse import ArgumentParser +from collections.abc import Iterable +from importlib import import_module +from typing import ( + Any, + Awaitable, + Callable, + Iterable as TypingIterable, + List, + Optional, + Set, + Type, + Union, + cast, +) + +from .abc import AbstractAccessLogger +from .helpers import all_tasks +from .log import access_logger +from .web_app import Application as Application, CleanupError as CleanupError +from .web_exceptions import ( + HTTPAccepted as HTTPAccepted, + HTTPBadGateway as HTTPBadGateway, + HTTPBadRequest as HTTPBadRequest, + HTTPClientError as HTTPClientError, + HTTPConflict as HTTPConflict, + HTTPCreated as HTTPCreated, + HTTPError as HTTPError, + HTTPException as HTTPException, + HTTPExpectationFailed as HTTPExpectationFailed, + HTTPFailedDependency as HTTPFailedDependency, + HTTPForbidden as HTTPForbidden, + HTTPFound as HTTPFound, + HTTPGatewayTimeout as HTTPGatewayTimeout, + HTTPGone as HTTPGone, + HTTPInsufficientStorage as HTTPInsufficientStorage, + HTTPInternalServerError as HTTPInternalServerError, + HTTPLengthRequired as HTTPLengthRequired, + HTTPMethodNotAllowed as HTTPMethodNotAllowed, + HTTPMisdirectedRequest as HTTPMisdirectedRequest, + HTTPMovedPermanently as HTTPMovedPermanently, + HTTPMultipleChoices as HTTPMultipleChoices, + HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired, + HTTPNoContent as HTTPNoContent, + HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation, + HTTPNotAcceptable as HTTPNotAcceptable, + HTTPNotExtended as HTTPNotExtended, + HTTPNotFound as HTTPNotFound, + HTTPNotImplemented as HTTPNotImplemented, + HTTPNotModified as HTTPNotModified, + HTTPOk as HTTPOk, + HTTPPartialContent as HTTPPartialContent, + HTTPPaymentRequired as HTTPPaymentRequired, + HTTPPermanentRedirect as HTTPPermanentRedirect, + HTTPPreconditionFailed as HTTPPreconditionFailed, + HTTPPreconditionRequired as HTTPPreconditionRequired, + HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired, + HTTPRedirection as HTTPRedirection, + HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge, + HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge, + HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable, + HTTPRequestTimeout as HTTPRequestTimeout, + HTTPRequestURITooLong as HTTPRequestURITooLong, + HTTPResetContent as HTTPResetContent, + HTTPSeeOther as HTTPSeeOther, + HTTPServerError as HTTPServerError, + HTTPServiceUnavailable as HTTPServiceUnavailable, + HTTPSuccessful as HTTPSuccessful, + HTTPTemporaryRedirect as HTTPTemporaryRedirect, + HTTPTooManyRequests as HTTPTooManyRequests, + HTTPUnauthorized as HTTPUnauthorized, + HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons, + HTTPUnprocessableEntity as HTTPUnprocessableEntity, + HTTPUnsupportedMediaType as 
HTTPUnsupportedMediaType, + HTTPUpgradeRequired as HTTPUpgradeRequired, + HTTPUseProxy as HTTPUseProxy, + HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates, + HTTPVersionNotSupported as HTTPVersionNotSupported, +) +from .web_fileresponse import FileResponse as FileResponse +from .web_log import AccessLogger +from .web_middlewares import ( + middleware as middleware, + normalize_path_middleware as normalize_path_middleware, +) +from .web_protocol import ( + PayloadAccessError as PayloadAccessError, + RequestHandler as RequestHandler, + RequestPayloadError as RequestPayloadError, +) +from .web_request import ( + BaseRequest as BaseRequest, + FileField as FileField, + Request as Request, +) +from .web_response import ( + ContentCoding as ContentCoding, + Response as Response, + StreamResponse as StreamResponse, + json_response as json_response, +) +from .web_routedef import ( + AbstractRouteDef as AbstractRouteDef, + RouteDef as RouteDef, + RouteTableDef as RouteTableDef, + StaticDef as StaticDef, + delete as delete, + get as get, + head as head, + options as options, + patch as patch, + post as post, + put as put, + route as route, + static as static, + view as view, +) +from .web_runner import ( + AppRunner as AppRunner, + BaseRunner as BaseRunner, + BaseSite as BaseSite, + GracefulExit as GracefulExit, + NamedPipeSite as NamedPipeSite, + ServerRunner as ServerRunner, + SockSite as SockSite, + TCPSite as TCPSite, + UnixSite as UnixSite, +) +from .web_server import Server as Server +from .web_urldispatcher import ( + AbstractResource as AbstractResource, + AbstractRoute as AbstractRoute, + DynamicResource as DynamicResource, + PlainResource as PlainResource, + PrefixedSubAppResource as PrefixedSubAppResource, + Resource as Resource, + ResourceRoute as ResourceRoute, + StaticResource as StaticResource, + UrlDispatcher as UrlDispatcher, + UrlMappingMatchInfo as UrlMappingMatchInfo, + View as View, +) +from .web_ws import ( + WebSocketReady as WebSocketReady, + WebSocketResponse as WebSocketResponse, + WSMsgType as WSMsgType, +) + +__all__ = ( + # web_app + "Application", + "CleanupError", + # web_exceptions + "HTTPAccepted", + "HTTPBadGateway", + "HTTPBadRequest", + "HTTPClientError", + "HTTPConflict", + "HTTPCreated", + "HTTPError", + "HTTPException", + "HTTPExpectationFailed", + "HTTPFailedDependency", + "HTTPForbidden", + "HTTPFound", + "HTTPGatewayTimeout", + "HTTPGone", + "HTTPInsufficientStorage", + "HTTPInternalServerError", + "HTTPLengthRequired", + "HTTPMethodNotAllowed", + "HTTPMisdirectedRequest", + "HTTPMovedPermanently", + "HTTPMultipleChoices", + "HTTPNetworkAuthenticationRequired", + "HTTPNoContent", + "HTTPNonAuthoritativeInformation", + "HTTPNotAcceptable", + "HTTPNotExtended", + "HTTPNotFound", + "HTTPNotImplemented", + "HTTPNotModified", + "HTTPOk", + "HTTPPartialContent", + "HTTPPaymentRequired", + "HTTPPermanentRedirect", + "HTTPPreconditionFailed", + "HTTPPreconditionRequired", + "HTTPProxyAuthenticationRequired", + "HTTPRedirection", + "HTTPRequestEntityTooLarge", + "HTTPRequestHeaderFieldsTooLarge", + "HTTPRequestRangeNotSatisfiable", + "HTTPRequestTimeout", + "HTTPRequestURITooLong", + "HTTPResetContent", + "HTTPSeeOther", + "HTTPServerError", + "HTTPServiceUnavailable", + "HTTPSuccessful", + "HTTPTemporaryRedirect", + "HTTPTooManyRequests", + "HTTPUnauthorized", + "HTTPUnavailableForLegalReasons", + "HTTPUnprocessableEntity", + "HTTPUnsupportedMediaType", + "HTTPUpgradeRequired", + "HTTPUseProxy", + "HTTPVariantAlsoNegotiates", + "HTTPVersionNotSupported", + # 
web_fileresponse + "FileResponse", + # web_middlewares + "middleware", + "normalize_path_middleware", + # web_protocol + "PayloadAccessError", + "RequestHandler", + "RequestPayloadError", + # web_request + "BaseRequest", + "FileField", + "Request", + # web_response + "ContentCoding", + "Response", + "StreamResponse", + "json_response", + # web_routedef + "AbstractRouteDef", + "RouteDef", + "RouteTableDef", + "StaticDef", + "delete", + "get", + "head", + "options", + "patch", + "post", + "put", + "route", + "static", + "view", + # web_runner + "AppRunner", + "BaseRunner", + "BaseSite", + "GracefulExit", + "ServerRunner", + "SockSite", + "TCPSite", + "UnixSite", + "NamedPipeSite", + # web_server + "Server", + # web_urldispatcher + "AbstractResource", + "AbstractRoute", + "DynamicResource", + "PlainResource", + "PrefixedSubAppResource", + "Resource", + "ResourceRoute", + "StaticResource", + "UrlDispatcher", + "UrlMappingMatchInfo", + "View", + # web_ws + "WebSocketReady", + "WebSocketResponse", + "WSMsgType", + # web + "run_app", +) + + +try: + from ssl import SSLContext +except ImportError: # pragma: no cover + SSLContext = Any # type: ignore[misc,assignment] + +HostSequence = TypingIterable[str] + + +async def _run_app( + app: Union[Application, Awaitable[Application]], + *, + host: Optional[Union[str, HostSequence]] = None, + port: Optional[int] = None, + path: Optional[str] = None, + sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + shutdown_timeout: float = 60.0, + keepalive_timeout: float = 75.0, + ssl_context: Optional[SSLContext] = None, + print: Callable[..., None] = print, + backlog: int = 128, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_format: str = AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger] = access_logger, + handle_signals: bool = True, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, +) -> None: + # An internal function that does all the dirty work of running the application + if asyncio.iscoroutine(app): + app = await app # type: ignore[misc] + + app = cast(Application, app) + + runner = AppRunner( + app, + handle_signals=handle_signals, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + keepalive_timeout=keepalive_timeout, + ) + + await runner.setup() + + sites: List[BaseSite] = [] + + try: + if host is not None: + if isinstance(host, (str, bytes, bytearray, memoryview)): + sites.append( + TCPSite( + runner, + host, + port, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + else: + for h in host: + sites.append( + TCPSite( + runner, + h, + port, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + elif path is None and sock is None or port is not None: + sites.append( + TCPSite( + runner, + port=port, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + + if path is not None: + if isinstance(path, (str, bytes, bytearray, memoryview)): + sites.append( + UnixSite( + runner, + path, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + else: + for p in path: + sites.append( + UnixSite( + runner, + p, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + + if sock
is not None: + if not isinstance(sock, Iterable): + sites.append( + SockSite( + runner, + sock, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + else: + for s in sock: + sites.append( + SockSite( + runner, + s, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + for site in sites: + await site.start() + + if print: # pragma: no branch + names = sorted(str(s.name) for s in runner.sites) + print( + "======== Running on {} ========\n" + "(Press CTRL+C to quit)".format(", ".join(names)) + ) + + # sleep forever by 1 hour intervals, + # on Windows before Python 3.8 wake up every 1 second to handle + # Ctrl+C smoothly + if sys.platform == "win32" and sys.version_info < (3, 8): + delay = 1 + else: + delay = 3600 + + while True: + await asyncio.sleep(delay) + finally: + await runner.cleanup() + + +def _cancel_tasks( + to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop +) -> None: + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + +def run_app( + app: Union[Application, Awaitable[Application]], + *, + host: Optional[Union[str, HostSequence]] = None, + port: Optional[int] = None, + path: Optional[str] = None, + sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + shutdown_timeout: float = 60.0, + keepalive_timeout: float = 75.0, + ssl_context: Optional[SSLContext] = None, + print: Callable[..., None] = print, + backlog: int = 128, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_format: str = AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger] = access_logger, + handle_signals: bool = True, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> None: + """Run an app locally""" + if loop is None: + loop = asyncio.new_event_loop() + + # Configure if and only if in debugging mode and using the default logger + if loop.get_debug() and access_log and access_log.name == "aiohttp.access": + if access_log.level == logging.NOTSET: + access_log.setLevel(logging.DEBUG) + if not access_log.hasHandlers(): + access_log.addHandler(logging.StreamHandler()) + + main_task = loop.create_task( + _run_app( + app, + host=host, + port=port, + path=path, + sock=sock, + shutdown_timeout=shutdown_timeout, + keepalive_timeout=keepalive_timeout, + ssl_context=ssl_context, + print=print, + backlog=backlog, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + handle_signals=handle_signals, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + + try: + asyncio.set_event_loop(loop) + loop.run_until_complete(main_task) + except (GracefulExit, KeyboardInterrupt): # pragma: no cover + pass + finally: + _cancel_tasks({main_task}, loop) + _cancel_tasks(all_tasks(loop), loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + loop.close() + + +def main(argv: List[str]) -> None: + arg_parser = ArgumentParser( + description="aiohttp.web Application server", prog="aiohttp.web" + ) + arg_parser.add_argument( + "entry_func", + help=( + "Callable returning the 
`aiohttp.web.Application` instance to " + "run. Should be specified in the 'module:function' syntax." + ), + metavar="entry-func", + ) + arg_parser.add_argument( + "-H", + "--hostname", + help="TCP/IP hostname to serve on (default: %(default)r)", + default="localhost", + ) + arg_parser.add_argument( + "-P", + "--port", + help="TCP/IP port to serve on (default: %(default)r)", + type=int, + default="8080", + ) + arg_parser.add_argument( + "-U", + "--path", + help="Unix file system path to serve on. Specifying a path will cause " + "hostname and port arguments to be ignored.", + ) + args, extra_argv = arg_parser.parse_known_args(argv) + + # Import logic + mod_str, _, func_str = args.entry_func.partition(":") + if not func_str or not mod_str: + arg_parser.error("'entry-func' not in 'module:function' syntax") + if mod_str.startswith("."): + arg_parser.error("relative module names not supported") + try: + module = import_module(mod_str) + except ImportError as ex: + arg_parser.error(f"unable to import {mod_str}: {ex}") + try: + func = getattr(module, func_str) + except AttributeError: + arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}") + + # Compatibility logic + if args.path is not None and not hasattr(socket, "AF_UNIX"): + arg_parser.error( + "file system paths not supported by your operating" " environment" + ) + + logging.basicConfig(level=logging.DEBUG) + + app = func(extra_argv) + run_app(app, host=args.hostname, port=args.port, path=args.path) + arg_parser.exit(message="Stopped\n") + + +if __name__ == "__main__": # pragma: no branch + main(sys.argv[1:]) # pragma: no cover diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_app.py b/venv/lib/python3.10/site-packages/aiohttp/web_app.py new file mode 100644 index 0000000..8fd4471 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_app.py @@ -0,0 +1,557 @@ +import asyncio +import logging +import warnings +from functools import partial, update_wrapper +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Awaitable, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from aiosignal import Signal +from frozenlist import FrozenList + +from . 
import hdrs +from .abc import ( + AbstractAccessLogger, + AbstractMatchInfo, + AbstractRouter, + AbstractStreamWriter, +) +from .helpers import DEBUG +from .http_parser import RawRequestMessage +from .log import web_logger +from .streams import StreamReader +from .web_log import AccessLogger +from .web_middlewares import _fix_request_current_app +from .web_protocol import RequestHandler +from .web_request import Request +from .web_response import StreamResponse +from .web_routedef import AbstractRouteDef +from .web_server import Server +from .web_urldispatcher import ( + AbstractResource, + AbstractRoute, + Domain, + MaskDomain, + MatchedSubAppResource, + PrefixedSubAppResource, + UrlDispatcher, +) + +__all__ = ("Application", "CleanupError") + + +if TYPE_CHECKING: # pragma: no cover + from .typedefs import Handler + + _AppSignal = Signal[Callable[["Application"], Awaitable[None]]] + _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]] + _Middleware = Union[ + Callable[[Request, Handler], Awaitable[StreamResponse]], + Callable[["Application", Handler], Awaitable[Handler]], # old-style + ] + _Middlewares = FrozenList[_Middleware] + _MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]] + _Subapps = List["Application"] +else: + # No type checker mode, skip types + _AppSignal = Signal + _RespPrepareSignal = Signal + _Middleware = Callable + _Middlewares = FrozenList + _MiddlewaresHandlers = Optional[Sequence] + _Subapps = List + + +class Application(MutableMapping[str, Any]): + ATTRS = frozenset( + [ + "logger", + "_debug", + "_router", + "_loop", + "_handler_args", + "_middlewares", + "_middlewares_handlers", + "_run_middlewares", + "_state", + "_frozen", + "_pre_frozen", + "_subapps", + "_on_response_prepare", + "_on_startup", + "_on_shutdown", + "_on_cleanup", + "_client_max_size", + "_cleanup_ctx", + ] + ) + + def __init__( + self, + *, + logger: logging.Logger = web_logger, + router: Optional[UrlDispatcher] = None, + middlewares: Iterable[_Middleware] = (), + handler_args: Optional[Mapping[str, Any]] = None, + client_max_size: int = 1024**2, + loop: Optional[asyncio.AbstractEventLoop] = None, + debug: Any = ..., # mypy doesn't support ellipsis + ) -> None: + if router is None: + router = UrlDispatcher() + else: + warnings.warn( + "router argument is deprecated", DeprecationWarning, stacklevel=2 + ) + assert isinstance(router, AbstractRouter), router + + if loop is not None: + warnings.warn( + "loop argument is deprecated", DeprecationWarning, stacklevel=2 + ) + + if debug is not ...: + warnings.warn( + "debug argument is deprecated", DeprecationWarning, stacklevel=2 + ) + self._debug = debug + self._router: UrlDispatcher = router + self._loop = loop + self._handler_args = handler_args + self.logger = logger + + self._middlewares: _Middlewares = FrozenList(middlewares) + + # initialized on freezing + self._middlewares_handlers: _MiddlewaresHandlers = None + # initialized on freezing + self._run_middlewares: Optional[bool] = None + + self._state: Dict[str, Any] = {} + self._frozen = False + self._pre_frozen = False + self._subapps: _Subapps = [] + + self._on_response_prepare: _RespPrepareSignal = Signal(self) + self._on_startup: _AppSignal = Signal(self) + self._on_shutdown: _AppSignal = Signal(self) + self._on_cleanup: _AppSignal = Signal(self) + self._cleanup_ctx = CleanupContext() + self._on_startup.append(self._cleanup_ctx._on_startup) + self._on_cleanup.append(self._cleanup_ctx._on_cleanup) + self._client_max_size = client_max_size + + def 
__init_subclass__(cls: Type["Application"]) -> None: + warnings.warn( + "Inheritance class {} from web.Application " + "is discouraged".format(cls.__name__), + DeprecationWarning, + stacklevel=2, + ) + + if DEBUG: # pragma: no cover + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom web.Application.{} attribute " + "is discouraged".format(name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + # MutableMapping API + + def __eq__(self, other: object) -> bool: + return self is other + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def _check_frozen(self) -> None: + if self._frozen: + warnings.warn( + "Changing state of started or joined " "application is deprecated", + DeprecationWarning, + stacklevel=3, + ) + + def __setitem__(self, key: str, value: Any) -> None: + self._check_frozen() + self._state[key] = value + + def __delitem__(self, key: str) -> None: + self._check_frozen() + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + ######## + @property + def loop(self) -> asyncio.AbstractEventLoop: + # Technically the loop can be None + # but we mask it by explicit type cast + # to provide more convenient type annotation + warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2) + return cast(asyncio.AbstractEventLoop, self._loop) + + def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None: + if loop is None: + loop = asyncio.get_event_loop() + if self._loop is not None and self._loop is not loop: + raise RuntimeError( + "web.Application instance initialized with different loop" + ) + + self._loop = loop + + # set loop debug + if self._debug is ...: + self._debug = loop.get_debug() + + # set loop to sub applications + for subapp in self._subapps: + subapp._set_loop(loop) + + @property + def pre_frozen(self) -> bool: + return self._pre_frozen + + def pre_freeze(self) -> None: + if self._pre_frozen: + return + + self._pre_frozen = True + self._middlewares.freeze() + self._router.freeze() + self._on_response_prepare.freeze() + self._cleanup_ctx.freeze() + self._on_startup.freeze() + self._on_shutdown.freeze() + self._on_cleanup.freeze() + self._middlewares_handlers = tuple(self._prepare_middleware()) + + # If neither the current app nor any subapp has middlewares, avoid running + # all of the code that this implies: a middleware hardcoded per app that + # sets up the current_app attribute. If no middlewares are configured the + # handler will receive the proper current_app without needing all of + # this code.
+ self._run_middlewares = True if self.middlewares else False + + for subapp in self._subapps: + subapp.pre_freeze() + self._run_middlewares = self._run_middlewares or subapp._run_middlewares + + @property + def frozen(self) -> bool: + return self._frozen + + def freeze(self) -> None: + if self._frozen: + return + + self.pre_freeze() + self._frozen = True + for subapp in self._subapps: + subapp.freeze() + + @property + def debug(self) -> bool: + warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2) + return self._debug # type: ignore[no-any-return] + + def _reg_subapp_signals(self, subapp: "Application") -> None: + def reg_handler(signame: str) -> None: + subsig = getattr(subapp, signame) + + async def handler(app: "Application") -> None: + await subsig.send(subapp) + + appsig = getattr(self, signame) + appsig.append(handler) + + reg_handler("on_startup") + reg_handler("on_shutdown") + reg_handler("on_cleanup") + + def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource: + if not isinstance(prefix, str): + raise TypeError("Prefix must be str") + prefix = prefix.rstrip("/") + if not prefix: + raise ValueError("Prefix cannot be empty") + factory = partial(PrefixedSubAppResource, prefix, subapp) + return self._add_subapp(factory, subapp) + + def _add_subapp( + self, resource_factory: Callable[[], AbstractResource], subapp: "Application" + ) -> AbstractResource: + if self.frozen: + raise RuntimeError("Cannot add sub application to frozen application") + if subapp.frozen: + raise RuntimeError("Cannot add frozen application") + resource = resource_factory() + self.router.register_resource(resource) + self._reg_subapp_signals(subapp) + self._subapps.append(subapp) + subapp.pre_freeze() + if self._loop is not None: + subapp._set_loop(self._loop) + return resource + + def add_domain(self, domain: str, subapp: "Application") -> AbstractResource: + if not isinstance(domain, str): + raise TypeError("Domain must be str") + elif "*" in domain: + rule: Domain = MaskDomain(domain) + else: + rule = Domain(domain) + factory = partial(MatchedSubAppResource, rule, subapp) + return self._add_subapp(factory, subapp) + + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: + return self.router.add_routes(routes) + + @property + def on_response_prepare(self) -> _RespPrepareSignal: + return self._on_response_prepare + + @property + def on_startup(self) -> _AppSignal: + return self._on_startup + + @property + def on_shutdown(self) -> _AppSignal: + return self._on_shutdown + + @property + def on_cleanup(self) -> _AppSignal: + return self._on_cleanup + + @property + def cleanup_ctx(self) -> "CleanupContext": + return self._cleanup_ctx + + @property + def router(self) -> UrlDispatcher: + return self._router + + @property + def middlewares(self) -> _Middlewares: + return self._middlewares + + def _make_handler( + self, + *, + loop: Optional[asyncio.AbstractEventLoop] = None, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + **kwargs: Any, + ) -> Server: + + if not issubclass(access_log_class, AbstractAccessLogger): + raise TypeError( + "access_log_class must be subclass of " + "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class) + ) + + self._set_loop(loop) + self.freeze() + + kwargs["debug"] = self._debug + kwargs["access_log_class"] = access_log_class + if self._handler_args: + for k, v in self._handler_args.items(): + kwargs[k] = v + + return Server( + self._handle, # type: ignore[arg-type] + 
request_factory=self._make_request, + loop=self._loop, + **kwargs, + ) + + def make_handler( + self, + *, + loop: Optional[asyncio.AbstractEventLoop] = None, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + **kwargs: Any, + ) -> Server: + + warnings.warn( + "Application.make_handler(...) is deprecated, " "use AppRunner API instead", + DeprecationWarning, + stacklevel=2, + ) + + return self._make_handler( + loop=loop, access_log_class=access_log_class, **kwargs + ) + + async def startup(self) -> None: + """Causes on_startup signal + + Should be called in the event loop along with the request handler. + """ + await self.on_startup.send(self) + + async def shutdown(self) -> None: + """Causes on_shutdown signal + + Should be called before cleanup() + """ + await self.on_shutdown.send(self) + + async def cleanup(self) -> None: + """Causes on_cleanup signal + + Should be called after shutdown() + """ + if self.on_cleanup.frozen: + await self.on_cleanup.send(self) + else: + # If an exception occurs in startup, ensure cleanup contexts are completed. + await self._cleanup_ctx._on_cleanup(self) + + def _make_request( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: RequestHandler, + writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + _cls: Type[Request] = Request, + ) -> Request: + return _cls( + message, + payload, + protocol, + writer, + task, + self._loop, + client_max_size=self._client_max_size, + ) + + def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]: + for m in reversed(self._middlewares): + if getattr(m, "__middleware_version__", None) == 1: + yield m, True + else: + warnings.warn( + 'old-style middleware "{!r}" deprecated, ' "see #2252".format(m), + DeprecationWarning, + stacklevel=2, + ) + yield m, False + + yield _fix_request_current_app(self), True + + async def _handle(self, request: Request) -> StreamResponse: + loop = asyncio.get_event_loop() + debug = loop.get_debug() + match_info = await self._router.resolve(request) + if debug: # pragma: no cover + if not isinstance(match_info, AbstractMatchInfo): + raise TypeError( + "match_info should be AbstractMatchInfo " + "instance, not {!r}".format(match_info) + ) + match_info.add_app(self) + + match_info.freeze() + + resp = None + request._match_info = match_info + expect = request.headers.get(hdrs.EXPECT) + if expect: + resp = await match_info.expect_handler(request) + await request.writer.drain() + + if resp is None: + handler = match_info.handler + + if self._run_middlewares: + for app in match_info.apps[::-1]: + for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] # noqa + if new_style: + handler = update_wrapper( + partial(m, handler=handler), handler + ) + else: + handler = await m(app, handler) # type: ignore[arg-type] + + resp = await handler(request) + + return resp + + def __call__(self) -> "Application": + """gunicorn compatibility""" + return self + + def __repr__(self) -> str: + return f"<Application 0x{id(self):x}>" + + def __bool__(self) -> bool: + return True + + +class CleanupError(RuntimeError): + @property + def exceptions(self) -> List[BaseException]: + return cast(List[BaseException], self.args[1]) + + +if TYPE_CHECKING: # pragma: no cover + _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]] +else: + _CleanupContextBase = FrozenList + + +class CleanupContext(_CleanupContextBase): + def __init__(self) -> None: + super().__init__() + self._exits: List[AsyncIterator[None]] = [] + + async def _on_startup(self, app: Application) ->
None: + for cb in self: + it = cb(app).__aiter__() + await it.__anext__() + self._exits.append(it) + + async def _on_cleanup(self, app: Application) -> None: + errors = [] + for it in reversed(self._exits): + try: + await it.__anext__() + except StopAsyncIteration: + pass + except Exception as exc: + errors.append(exc) + else: + errors.append(RuntimeError(f"{it!r} has more than one 'yield'")) + if errors: + if len(errors) == 1: + raise errors[0] + else: + raise CleanupError("Multiple errors on cleanup stage", errors) diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_exceptions.py b/venv/lib/python3.10/site-packages/aiohttp/web_exceptions.py new file mode 100644 index 0000000..ae706a1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_exceptions.py @@ -0,0 +1,441 @@ +import warnings +from typing import Any, Dict, Iterable, List, Optional, Set # noqa + +from yarl import URL + +from .typedefs import LooseHeaders, StrOrURL +from .web_response import Response + +__all__ = ( + "HTTPException", + "HTTPError", + "HTTPRedirection", + "HTTPSuccessful", + "HTTPOk", + "HTTPCreated", + "HTTPAccepted", + "HTTPNonAuthoritativeInformation", + "HTTPNoContent", + "HTTPResetContent", + "HTTPPartialContent", + "HTTPMultipleChoices", + "HTTPMovedPermanently", + "HTTPFound", + "HTTPSeeOther", + "HTTPNotModified", + "HTTPUseProxy", + "HTTPTemporaryRedirect", + "HTTPPermanentRedirect", + "HTTPClientError", + "HTTPBadRequest", + "HTTPUnauthorized", + "HTTPPaymentRequired", + "HTTPForbidden", + "HTTPNotFound", + "HTTPMethodNotAllowed", + "HTTPNotAcceptable", + "HTTPProxyAuthenticationRequired", + "HTTPRequestTimeout", + "HTTPConflict", + "HTTPGone", + "HTTPLengthRequired", + "HTTPPreconditionFailed", + "HTTPRequestEntityTooLarge", + "HTTPRequestURITooLong", + "HTTPUnsupportedMediaType", + "HTTPRequestRangeNotSatisfiable", + "HTTPExpectationFailed", + "HTTPMisdirectedRequest", + "HTTPUnprocessableEntity", + "HTTPFailedDependency", + "HTTPUpgradeRequired", + "HTTPPreconditionRequired", + "HTTPTooManyRequests", + "HTTPRequestHeaderFieldsTooLarge", + "HTTPUnavailableForLegalReasons", + "HTTPServerError", + "HTTPInternalServerError", + "HTTPNotImplemented", + "HTTPBadGateway", + "HTTPServiceUnavailable", + "HTTPGatewayTimeout", + "HTTPVersionNotSupported", + "HTTPVariantAlsoNegotiates", + "HTTPInsufficientStorage", + "HTTPNotExtended", + "HTTPNetworkAuthenticationRequired", +) + + +############################################################ +# HTTP Exceptions +############################################################ + + +class HTTPException(Response, Exception): + + # You should set in subclasses: + # status = 200 + + status_code = -1 + empty_body = False + + __http_exception__ = True + + def __init__( + self, + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + if body is not None: + warnings.warn( + "body argument is deprecated for http web exceptions", + DeprecationWarning, + ) + Response.__init__( + self, + status=self.status_code, + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + Exception.__init__(self, self.reason) + if self.body is None and not self.empty_body: + self.text = f"{self.status}: {self.reason}" + + def __bool__(self) -> bool: + return True + + +class HTTPError(HTTPException): + """Base class for exceptions with status codes in the 400s and 500s.""" + + +class HTTPRedirection(HTTPException): + 
"""Base class for exceptions with status codes in the 300s.""" + + +class HTTPSuccessful(HTTPException): + """Base class for exceptions with status codes in the 200s.""" + + +class HTTPOk(HTTPSuccessful): + status_code = 200 + + +class HTTPCreated(HTTPSuccessful): + status_code = 201 + + +class HTTPAccepted(HTTPSuccessful): + status_code = 202 + + +class HTTPNonAuthoritativeInformation(HTTPSuccessful): + status_code = 203 + + +class HTTPNoContent(HTTPSuccessful): + status_code = 204 + empty_body = True + + +class HTTPResetContent(HTTPSuccessful): + status_code = 205 + empty_body = True + + +class HTTPPartialContent(HTTPSuccessful): + status_code = 206 + + +############################################################ +# 3xx redirection +############################################################ + + +class _HTTPMove(HTTPRedirection): + def __init__( + self, + location: StrOrURL, + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + if not location: + raise ValueError("HTTP redirects need a location to redirect to.") + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self.headers["Location"] = str(URL(location)) + self.location = location + + +class HTTPMultipleChoices(_HTTPMove): + status_code = 300 + + +class HTTPMovedPermanently(_HTTPMove): + status_code = 301 + + +class HTTPFound(_HTTPMove): + status_code = 302 + + +# This one is safe after a POST (the redirected location will be +# retrieved with GET): +class HTTPSeeOther(_HTTPMove): + status_code = 303 + + +class HTTPNotModified(HTTPRedirection): + # FIXME: this should include a date or etag header + status_code = 304 + empty_body = True + + +class HTTPUseProxy(_HTTPMove): + # Not a move, but looks a little like one + status_code = 305 + + +class HTTPTemporaryRedirect(_HTTPMove): + status_code = 307 + + +class HTTPPermanentRedirect(_HTTPMove): + status_code = 308 + + +############################################################ +# 4xx client error +############################################################ + + +class HTTPClientError(HTTPError): + pass + + +class HTTPBadRequest(HTTPClientError): + status_code = 400 + + +class HTTPUnauthorized(HTTPClientError): + status_code = 401 + + +class HTTPPaymentRequired(HTTPClientError): + status_code = 402 + + +class HTTPForbidden(HTTPClientError): + status_code = 403 + + +class HTTPNotFound(HTTPClientError): + status_code = 404 + + +class HTTPMethodNotAllowed(HTTPClientError): + status_code = 405 + + def __init__( + self, + method: str, + allowed_methods: Iterable[str], + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + allow = ",".join(sorted(allowed_methods)) + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self.headers["Allow"] = allow + self.allowed_methods: Set[str] = set(allowed_methods) + self.method = method.upper() + + +class HTTPNotAcceptable(HTTPClientError): + status_code = 406 + + +class HTTPProxyAuthenticationRequired(HTTPClientError): + status_code = 407 + + +class HTTPRequestTimeout(HTTPClientError): + status_code = 408 + + +class HTTPConflict(HTTPClientError): + status_code = 409 + + +class HTTPGone(HTTPClientError): + status_code = 410 + + +class HTTPLengthRequired(HTTPClientError): + 
status_code = 411 + + +class HTTPPreconditionFailed(HTTPClientError): + status_code = 412 + + +class HTTPRequestEntityTooLarge(HTTPClientError): + status_code = 413 + + def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None: + kwargs.setdefault( + "text", + "Maximum request body size {} exceeded, " + "actual body size {}".format(max_size, actual_size), + ) + super().__init__(**kwargs) + + +class HTTPRequestURITooLong(HTTPClientError): + status_code = 414 + + +class HTTPUnsupportedMediaType(HTTPClientError): + status_code = 415 + + +class HTTPRequestRangeNotSatisfiable(HTTPClientError): + status_code = 416 + + +class HTTPExpectationFailed(HTTPClientError): + status_code = 417 + + +class HTTPMisdirectedRequest(HTTPClientError): + status_code = 421 + + +class HTTPUnprocessableEntity(HTTPClientError): + status_code = 422 + + +class HTTPFailedDependency(HTTPClientError): + status_code = 424 + + +class HTTPUpgradeRequired(HTTPClientError): + status_code = 426 + + +class HTTPPreconditionRequired(HTTPClientError): + status_code = 428 + + +class HTTPTooManyRequests(HTTPClientError): + status_code = 429 + + +class HTTPRequestHeaderFieldsTooLarge(HTTPClientError): + status_code = 431 + + +class HTTPUnavailableForLegalReasons(HTTPClientError): + status_code = 451 + + def __init__( + self, + link: str, + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self.headers["Link"] = '<%s>; rel="blocked-by"' % link + self.link = link + + +############################################################ +# 5xx Server Error +############################################################ +# Response status codes beginning with the digit "5" indicate cases in +# which the server is aware that it has erred or is incapable of +# performing the request. Except when responding to a HEAD request, the +# server SHOULD include an entity containing an explanation of the error +# situation, and whether it is a temporary or permanent condition. User +# agents SHOULD display any included entity to the user. These response +# codes are applicable to any request method. 
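+ # A minimal usage sketch (not part of this module; it assumes the usual
+ # "from aiohttp import web" namespace): because these classes are both
+ # Response and Exception, a handler may simply raise one and the server
+ # sends it as the reply, e.g.
+ #
+ #     async def handler(request: web.Request) -> web.Response:
+ #         raise web.HTTPServiceUnavailable(text="down for maintenance")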
+ + +class HTTPServerError(HTTPError): + pass + + +class HTTPInternalServerError(HTTPServerError): + status_code = 500 + + +class HTTPNotImplemented(HTTPServerError): + status_code = 501 + + +class HTTPBadGateway(HTTPServerError): + status_code = 502 + + +class HTTPServiceUnavailable(HTTPServerError): + status_code = 503 + + +class HTTPGatewayTimeout(HTTPServerError): + status_code = 504 + + +class HTTPVersionNotSupported(HTTPServerError): + status_code = 505 + + +class HTTPVariantAlsoNegotiates(HTTPServerError): + status_code = 506 + + +class HTTPInsufficientStorage(HTTPServerError): + status_code = 507 + + +class HTTPNotExtended(HTTPServerError): + status_code = 510 + + +class HTTPNetworkAuthenticationRequired(HTTPServerError): + status_code = 511 diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_fileresponse.py b/venv/lib/python3.10/site-packages/aiohttp/web_fileresponse.py new file mode 100644 index 0000000..f41ed3f --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_fileresponse.py @@ -0,0 +1,288 @@ +import asyncio +import mimetypes +import os +import pathlib +import sys +from typing import ( # noqa + IO, + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Iterator, + List, + Optional, + Tuple, + Union, + cast, +) + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ETAG_ANY, ETag +from .typedefs import Final, LooseHeaders +from .web_exceptions import ( + HTTPNotModified, + HTTPPartialContent, + HTTPPreconditionFailed, + HTTPRequestRangeNotSatisfiable, +) +from .web_response import StreamResponse + +__all__ = ("FileResponse",) + +if TYPE_CHECKING: # pragma: no cover + from .web_request import BaseRequest + + +_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] + + +NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE")) + + +class FileResponse(StreamResponse): + """A response object can be used to send files.""" + + def __init__( + self, + path: Union[str, pathlib.Path], + chunk_size: int = 256 * 1024, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + ) -> None: + super().__init__(status=status, reason=reason, headers=headers) + + if isinstance(path, str): + path = pathlib.Path(path) + + self._path = path + self._chunk_size = chunk_size + + async def _sendfile_fallback( + self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int + ) -> AbstractStreamWriter: + # To keep memory usage low,fobj is transferred in chunks + # controlled by the constructor's chunk_size argument. 
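+ # For example, with the default chunk_size of 256 KiB a one-MiB count is
+ # served in four reads; each later read is capped at min(chunk_size, count),
+ # so nothing past the requested range is written.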
+ + chunk_size = self._chunk_size + loop = asyncio.get_event_loop() + + await loop.run_in_executor(None, fobj.seek, offset) + + chunk = await loop.run_in_executor(None, fobj.read, chunk_size) + while chunk: + await writer.write(chunk) + count = count - chunk_size + if count <= 0: + break + chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count)) + + await writer.drain() + return writer + + async def _sendfile( + self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int + ) -> AbstractStreamWriter: + writer = await super().prepare(request) + assert writer is not None + + if NOSENDFILE or sys.version_info < (3, 7) or self.compression: + return await self._sendfile_fallback(writer, fobj, offset, count) + + loop = request._loop + transport = request.transport + assert transport is not None + + try: + await loop.sendfile(transport, fobj, offset, count) + except NotImplementedError: + return await self._sendfile_fallback(writer, fobj, offset, count) + + await super().write_eof() + return writer + + @staticmethod + def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool: + if len(etags) == 1 and etags[0].value == ETAG_ANY: + return True + return any(etag.value == etag_value for etag in etags if not etag.is_weak) + + async def _not_modified( + self, request: "BaseRequest", etag_value: str, last_modified: float + ) -> Optional[AbstractStreamWriter]: + self.set_status(HTTPNotModified.status_code) + self._length_check = False + self.etag = etag_value # type: ignore[assignment] + self.last_modified = last_modified # type: ignore[assignment] + # Delete any Content-Length headers provided by user. HTTP 304 + # should always have empty response body + return await super().prepare(request) + + async def _precondition_failed( + self, request: "BaseRequest" + ) -> Optional[AbstractStreamWriter]: + self.set_status(HTTPPreconditionFailed.status_code) + self.content_length = 0 + return await super().prepare(request) + + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + filepath = self._path + + gzip = False + if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""): + gzip_path = filepath.with_name(filepath.name + ".gz") + + if gzip_path.is_file(): + filepath = gzip_path + gzip = True + + loop = asyncio.get_event_loop() + st: os.stat_result = await loop.run_in_executor(None, filepath.stat) + + etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" + last_modified = st.st_mtime + + # https://tools.ietf.org/html/rfc7232#section-6 + ifmatch = request.if_match + if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch): + return await self._precondition_failed(request) + + unmodsince = request.if_unmodified_since + if ( + unmodsince is not None + and ifmatch is None + and st.st_mtime > unmodsince.timestamp() + ): + return await self._precondition_failed(request) + + ifnonematch = request.if_none_match + if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch): + return await self._not_modified(request, etag_value, last_modified) + + modsince = request.if_modified_since + if ( + modsince is not None + and ifnonematch is None + and st.st_mtime <= modsince.timestamp() + ): + return await self._not_modified(request, etag_value, last_modified) + + if hdrs.CONTENT_TYPE not in self.headers: + ct, encoding = mimetypes.guess_type(str(filepath)) + if not ct: + ct = "application/octet-stream" + should_set_ct = True + else: + encoding = "gzip" if gzip else None + should_set_ct = False + + status = 
self._status + file_size = st.st_size + count = file_size + + start = None + + ifrange = request.if_range + if ifrange is None or st.st_mtime <= ifrange.timestamp(): + # If-Range header check: + # condition = cached date >= last modification date + # return 206 if True else 200. + # if False: + # Range header would not be processed, return 200 + # if True but Range header missing + # return 200 + try: + rng = request.http_range + start = rng.start + end = rng.stop + except ValueError: + # https://tools.ietf.org/html/rfc7233: + # A server generating a 416 (Range Not Satisfiable) response to + # a byte-range request SHOULD send a Content-Range header field + # with an unsatisfied-range value. + # The complete-length in a 416 response indicates the current + # length of the selected representation. + # + # Will do the same below. Many servers ignore this and do not + # send a Content-Range header with HTTP 416 + self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self.set_status(HTTPRequestRangeNotSatisfiable.status_code) + return await super().prepare(request) + + # If a range request has been made, convert start, end slice + # notation into file pointer offset and count + if start is not None or end is not None: + if start < 0 and end is None: # return tail of file + start += file_size + if start < 0: + # if Range:bytes=-1000 in request header but file size + # is only 200, there would be trouble without this + start = 0 + count = file_size - start + else: + # rfc7233:If the last-byte-pos value is + # absent, or if the value is greater than or equal to + # the current length of the representation data, + # the byte range is interpreted as the remainder + # of the representation (i.e., the server replaces the + # value of last-byte-pos with a value that is one less than + # the current length of the selected representation). + count = ( + min(end if end is not None else file_size, file_size) - start + ) + + if start >= file_size: + # HTTP 416 should be returned in this case. + # + # According to https://tools.ietf.org/html/rfc7233: + # If a valid byte-range-set includes at least one + # byte-range-spec with a first-byte-pos that is less than + # the current length of the representation, or at least one + # suffix-byte-range-spec with a non-zero suffix-length, + # then the byte-range-set is satisfiable. Otherwise, the + # byte-range-set is unsatisfiable. + self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self.set_status(HTTPRequestRangeNotSatisfiable.status_code) + return await super().prepare(request) + + status = HTTPPartialContent.status_code + # Even though you are sending the whole file, you should still + # return a HTTP 206 for a Range request. 
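+ # Worked example: "Range: bytes=0-" on a 10-byte file leaves count == 10,
+ # yet the response is 206 with "Content-Range: bytes 0-9/10".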
+ self.set_status(status) + + if should_set_ct: + self.content_type = ct # type: ignore[assignment] + if encoding: + self.headers[hdrs.CONTENT_ENCODING] = encoding + if gzip: + self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING + + self.etag = etag_value # type: ignore[assignment] + self.last_modified = st.st_mtime # type: ignore[assignment] + self.content_length = count + + self.headers[hdrs.ACCEPT_RANGES] = "bytes" + + real_start = cast(int, start) + + if status == HTTPPartialContent.status_code: + self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( + real_start, real_start + count - 1, file_size + ) + + # If we are sending 0 bytes calling sendfile() will throw a ValueError + if count == 0 or request.method == hdrs.METH_HEAD or self.status in [204, 304]: + return await super().prepare(request) + + fobj = await loop.run_in_executor(None, filepath.open, "rb") + if start: # be aware that start could be None or int=0 here. + offset = start + else: + offset = 0 + + try: + return await self._sendfile(request, fobj, offset, count) + finally: + await loop.run_in_executor(None, fobj.close) diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_log.py b/venv/lib/python3.10/site-packages/aiohttp/web_log.py new file mode 100644 index 0000000..bc6e3b5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_log.py @@ -0,0 +1,208 @@ +import datetime +import functools +import logging +import os +import re +from collections import namedtuple +from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa + +from .abc import AbstractAccessLogger +from .web_request import BaseRequest +from .web_response import StreamResponse + +KeyMethod = namedtuple("KeyMethod", "key method") + + +class AccessLogger(AbstractAccessLogger): + """Helper object to log access. + + Usage: + log = logging.getLogger("spam") + log_format = "%a %{User-Agent}i" + access_logger = AccessLogger(log, log_format) + access_logger.log(request, response, time) + + Format: + %% The percent sign + %a Remote IP-address (IP-address of proxy if using reverse proxy) + %t Time when the request was started to process + %P The process ID of the child that serviced the request + %r First line of request + %s Response status code + %b Size of response in bytes, including HTTP headers + %T Time taken to serve the request, in seconds + %Tf Time taken to serve the request, in seconds with floating fraction + in .06f format + %D Time taken to serve the request, in microseconds + %{FOO}i request.headers['FOO'] + %{FOO}o response.headers['FOO'] + %{FOO}e os.environ['FOO'] + + """ + + LOG_FORMAT_MAP = { + "a": "remote_address", + "t": "request_start_time", + "P": "process_id", + "r": "first_request_line", + "s": "response_status", + "b": "response_size", + "T": "request_time", + "Tf": "request_time_frac", + "D": "request_time_micro", + "i": "request_header", + "o": "response_header", + } + + LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"' + FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)") + CLEANUP_RE = re.compile(r"(%[^s])") + _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {} + + def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None: + """Initialise the logger. + + logger is a logger object to be used for logging. + log_format is a string with apache compatible log format description. 
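+ For example, the default LOG_FORMAT above renders a line such as:
+ 127.0.0.1 [01/Jan/2024:12:00:00 +0000] "GET / HTTP/1.1" 200 1024 "-" "curl/8.0.1"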
+ + """ + super().__init__(logger, log_format=log_format) + + _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format) + if not _compiled_format: + _compiled_format = self.compile_format(log_format) + AccessLogger._FORMAT_CACHE[log_format] = _compiled_format + + self._log_format, self._methods = _compiled_format + + def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: + """Translate log_format into form usable by modulo formatting + + All known atoms will be replaced with %s + Also methods for formatting of those atoms will be added to + _methods in appropriate order + + For example we have log_format = "%a %t" + This format will be translated to "%s %s" + Also contents of _methods will be + [self._format_a, self._format_t] + These method will be called and results will be passed + to translated string format. + + Each _format_* method receive 'args' which is list of arguments + given to self.log + + Exceptions are _format_e, _format_i and _format_o methods which + also receive key name (by functools.partial) + + """ + # list of (key, method) tuples, we don't use an OrderedDict as users + # can repeat the same key more than once + methods = list() + + for atom in self.FORMAT_RE.findall(log_format): + if atom[1] == "": + format_key1 = self.LOG_FORMAT_MAP[atom[0]] + m = getattr(AccessLogger, "_format_%s" % atom[0]) + key_method = KeyMethod(format_key1, m) + else: + format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) + m = getattr(AccessLogger, "_format_%s" % atom[2]) + key_method = KeyMethod(format_key2, functools.partial(m, atom[1])) + + methods.append(key_method) + + log_format = self.FORMAT_RE.sub(r"%s", log_format) + log_format = self.CLEANUP_RE.sub(r"%\1", log_format) + return log_format, methods + + @staticmethod + def _format_i( + key: str, request: BaseRequest, response: StreamResponse, time: float + ) -> str: + if request is None: + return "(no headers)" + + # suboptimal, make istr(key) once + return request.headers.get(key, "-") + + @staticmethod + def _format_o( + key: str, request: BaseRequest, response: StreamResponse, time: float + ) -> str: + # suboptimal, make istr(key) once + return response.headers.get(key, "-") + + @staticmethod + def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str: + if request is None: + return "-" + ip = request.remote + return ip if ip is not None else "-" + + @staticmethod + def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str: + now = datetime.datetime.utcnow() + start_time = now - datetime.timedelta(seconds=time) + return start_time.strftime("[%d/%b/%Y:%H:%M:%S +0000]") + + @staticmethod + def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str: + return "<%s>" % os.getpid() + + @staticmethod + def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str: + if request is None: + return "-" + return "{} {} HTTP/{}.{}".format( + request.method, + request.path_qs, + request.version.major, + request.version.minor, + ) + + @staticmethod + def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int: + return response.status + + @staticmethod + def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int: + return response.body_length + + @staticmethod + def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str: + return str(round(time)) + + @staticmethod + def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str: + return "%06f" % 
time + + @staticmethod + def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str: + return str(round(time * 1000000)) + + def _format_line( + self, request: BaseRequest, response: StreamResponse, time: float + ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]: + return [(key, method(request, response, time)) for key, method in self._methods] + + def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: + try: + fmt_info = self._format_line(request, response, time) + + values = list() + extra = dict() + for key, value in fmt_info: + values.append(value) + + if key.__class__ is str: + extra[key] = value + else: + k1, k2 = key # type: ignore[misc] + dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type] + dct[k2] = value # type: ignore[index,has-type] + extra[k1] = dct # type: ignore[has-type,assignment] + + self.logger.info(self._log_format % tuple(values), extra=extra) + except Exception: + self.logger.exception("Error in logging") diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_middlewares.py b/venv/lib/python3.10/site-packages/aiohttp/web_middlewares.py new file mode 100644 index 0000000..fabcc44 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_middlewares.py @@ -0,0 +1,119 @@ +import re +from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar + +from .typedefs import Handler +from .web_exceptions import HTTPPermanentRedirect, _HTTPMove +from .web_request import Request +from .web_response import StreamResponse +from .web_urldispatcher import SystemRoute + +__all__ = ( + "middleware", + "normalize_path_middleware", +) + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application + +_Func = TypeVar("_Func") + + +async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]: + alt_request = request.clone(rel_url=path) + + match_info = await request.app.router.resolve(alt_request) + alt_request._match_info = match_info + + if match_info.http_exception is None: + return True, alt_request + + return False, request + + +def middleware(f: _Func) -> _Func: + f.__middleware_version__ = 1 # type: ignore[attr-defined] + return f + + +_Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]] + + +def normalize_path_middleware( + *, + append_slash: bool = True, + remove_slash: bool = False, + merge_slashes: bool = True, + redirect_class: Type[_HTTPMove] = HTTPPermanentRedirect, +) -> _Middleware: + """Factory for producing a middleware that normalizes the path of a request. + + Normalizing means: + - Add or remove a trailing slash to the path. + - Double slashes are replaced by one. + + The middleware returns as soon as it finds a path that resolves + correctly. The order if both merge and append/remove are enabled is + 1) merge slashes + 2) append/remove slash + 3) both merge slashes and append/remove slash. + If the path resolves with at least one of those conditions, it will + redirect to the new path. + + Only one of `append_slash` and `remove_slash` can be enabled. If both + are `True` the factory will raise an assertion error + + If `append_slash` is `True` the middleware will append a slash when + needed. If a resource is defined with trailing slash and the request + comes without it, it will append it automatically. + + If `remove_slash` is `True`, `append_slash` must be `False`. 
When enabled + the middleware will remove trailing slashes and redirect if the resource + is defined + + If merge_slashes is True, merge multiple consecutive slashes in the + path into one. + """ + correct_configuration = not (append_slash and remove_slash) + assert correct_configuration, "Cannot both remove and append slash" + + @middleware + async def impl(request: Request, handler: Handler) -> StreamResponse: + if isinstance(request.match_info.route, SystemRoute): + paths_to_check = [] + if "?" in request.raw_path: + path, query = request.raw_path.split("?", 1) + query = "?" + query + else: + query = "" + path = request.raw_path + + if merge_slashes: + paths_to_check.append(re.sub("//+", "/", path)) + if append_slash and not request.path.endswith("/"): + paths_to_check.append(path + "/") + if remove_slash and request.path.endswith("/"): + paths_to_check.append(path[:-1]) + if merge_slashes and append_slash: + paths_to_check.append(re.sub("//+", "/", path + "/")) + if merge_slashes and remove_slash: + merged_slashes = re.sub("//+", "/", path) + paths_to_check.append(merged_slashes[:-1]) + + for path in paths_to_check: + path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg + resolves, request = await _check_request_resolves(request, path) + if resolves: + raise redirect_class(request.raw_path + query) + + return await handler(request) + + return impl + + +def _fix_request_current_app(app: "Application") -> _Middleware: + @middleware + async def impl(request: Request, handler: Handler) -> StreamResponse: + with request.match_info.set_current_app(app): + return await handler(request) + + return impl diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_protocol.py b/venv/lib/python3.10/site-packages/aiohttp/web_protocol.py new file mode 100644 index 0000000..10a9608 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_protocol.py @@ -0,0 +1,679 @@ +import asyncio +import asyncio.streams +import traceback +import warnings +from collections import deque +from contextlib import suppress +from html import escape as html_escape +from http import HTTPStatus +from logging import Logger +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Deque, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import attr +import yarl + +from .abc import AbstractAccessLogger, AbstractStreamWriter +from .base_protocol import BaseProtocol +from .helpers import ceil_timeout +from .http import ( + HttpProcessingError, + HttpRequestParser, + HttpVersion10, + RawRequestMessage, + StreamWriter, +) +from .log import access_logger, server_logger +from .streams import EMPTY_PAYLOAD, StreamReader +from .tcp_helpers import tcp_keepalive +from .web_exceptions import HTTPException +from .web_log import AccessLogger +from .web_request import BaseRequest +from .web_response import Response, StreamResponse + +__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") + +if TYPE_CHECKING: # pragma: no cover + from .web_server import Server + + +_RequestFactory = Callable[ + [ + RawRequestMessage, + StreamReader, + "RequestHandler", + AbstractStreamWriter, + "asyncio.Task[None]", + ], + BaseRequest, +] + +_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]] + +ERROR = RawRequestMessage( + "UNKNOWN", + "/", + HttpVersion10, + {}, # type: ignore[arg-type] + {}, # type: ignore[arg-type] + True, + None, + False, + False, + yarl.URL("/"), +) + + +class RequestPayloadError(Exception): + """Payload parsing error.""" + + +class 
PayloadAccessError(Exception): + """Payload was accessed after response was sent.""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class _ErrInfo: + status: int + exc: BaseException + message: str + + +_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader] + + +class RequestHandler(BaseProtocol): + """HTTP protocol implementation. + + RequestHandler handles incoming HTTP request. It reads request line, + request headers and request payload and calls handle_request() method. + By default it always returns with 404 response. + + RequestHandler handles errors in incoming request, like bad + status line, bad headers or incomplete payload. If any error occurs, + connection gets closed. + + keepalive_timeout -- number of seconds before closing + keep-alive connection + + tcp_keepalive -- TCP keep-alive is on, default is on + + debug -- enable debug mode + + logger -- custom logger object + + access_log_class -- custom class for access_logger + + access_log -- custom logging object + + access_log_format -- access log format string + + loop -- Optional event loop + + max_line_size -- Optional maximum header line size + + max_field_size -- Optional maximum header field size + + max_headers -- Optional maximum header size + + """ + + KEEPALIVE_RESCHEDULE_DELAY = 1 + + __slots__ = ( + "_request_count", + "_keepalive", + "_manager", + "_request_handler", + "_request_factory", + "_tcp_keepalive", + "_keepalive_time", + "_keepalive_handle", + "_keepalive_timeout", + "_lingering_time", + "_messages", + "_message_tail", + "_waiter", + "_task_handler", + "_upgrade", + "_payload_parser", + "_request_parser", + "_reading_paused", + "logger", + "debug", + "access_log", + "access_logger", + "_close", + "_force_close", + "_current_request", + ) + + def __init__( + self, + manager: "Server", + *, + loop: asyncio.AbstractEventLoop, + keepalive_timeout: float = 75.0, # NGINX default is 75 secs + tcp_keepalive: bool = True, + logger: Logger = server_logger, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log: Logger = access_logger, + access_log_format: str = AccessLogger.LOG_FORMAT, + debug: bool = False, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + lingering_time: float = 10.0, + read_bufsize: int = 2**16, + auto_decompress: bool = True, + ): + super().__init__(loop) + + self._request_count = 0 + self._keepalive = False + self._current_request: Optional[BaseRequest] = None + self._manager: Optional[Server] = manager + self._request_handler: Optional[_RequestHandler] = manager.request_handler + self._request_factory: Optional[_RequestFactory] = manager.request_factory + + self._tcp_keepalive = tcp_keepalive + # placeholder to be replaced on keepalive timeout setup + self._keepalive_time = 0.0 + self._keepalive_handle: Optional[asyncio.Handle] = None + self._keepalive_timeout = keepalive_timeout + self._lingering_time = float(lingering_time) + + self._messages: Deque[_MsgType] = deque() + self._message_tail = b"" + + self._waiter: Optional[asyncio.Future[None]] = None + self._task_handler: Optional[asyncio.Task[None]] = None + + self._upgrade = False + self._payload_parser: Any = None + self._request_parser: Optional[HttpRequestParser] = HttpRequestParser( + self, + loop, + read_bufsize, + max_line_size=max_line_size, + max_field_size=max_field_size, + max_headers=max_headers, + payload_exception=RequestPayloadError, + auto_decompress=auto_decompress, + ) + + self.logger = logger + self.debug = debug + self.access_log = 
access_log + if access_log: + self.access_logger: Optional[AbstractAccessLogger] = access_log_class( + access_log, access_log_format + ) + else: + self.access_logger = None + + self._close = False + self._force_close = False + + def __repr__(self) -> str: + return "<{} {}>".format( + self.__class__.__name__, + "connected" if self.transport is not None else "disconnected", + ) + + @property + def keepalive_timeout(self) -> float: + return self._keepalive_timeout + + async def shutdown(self, timeout: Optional[float] = 15.0) -> None: + """Do worker process exit preparations. + + We need to clean up everything and stop accepting requests. + It is especially important for keep-alive connections. + """ + self._force_close = True + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + if self._waiter: + self._waiter.cancel() + + # wait for handlers + with suppress(asyncio.CancelledError, asyncio.TimeoutError): + async with ceil_timeout(timeout): + if self._current_request is not None: + self._current_request._cancel(asyncio.CancelledError()) + + if self._task_handler is not None and not self._task_handler.done(): + await self._task_handler + + # force-close non-idle handler + if self._task_handler is not None: + self._task_handler.cancel() + + if self.transport is not None: + self.transport.close() + self.transport = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + super().connection_made(transport) + + real_transport = cast(asyncio.Transport, transport) + if self._tcp_keepalive: + tcp_keepalive(real_transport) + + self._task_handler = self._loop.create_task(self.start()) + assert self._manager is not None + self._manager.connection_made(self, real_transport) + + def connection_lost(self, exc: Optional[BaseException]) -> None: + if self._manager is None: + return + self._manager.connection_lost(self, exc) + + super().connection_lost(exc) + + self._manager = None + self._force_close = True + self._request_factory = None + self._request_handler = None + self._request_parser = None + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + if self._current_request is not None: + if exc is None: + exc = ConnectionResetError("Connection lost") + self._current_request._cancel(exc) + + if self._waiter is not None: + self._waiter.cancel() + + self._task_handler = None + + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + + def set_parser(self, parser: Any) -> None: + # Actual type is WebReader + assert self._payload_parser is None + + self._payload_parser = parser + + if self._message_tail: + self._payload_parser.feed_data(self._message_tail) + self._message_tail = b"" + + def eof_received(self) -> None: + pass + + def data_received(self, data: bytes) -> None: + if self._force_close or self._close: + return + # parse http messages + messages: Sequence[_MsgType] + if self._payload_parser is None and not self._upgrade: + assert self._request_parser is not None + try: + messages, upgraded, tail = self._request_parser.feed_data(data) + except HttpProcessingError as exc: + messages = [ + (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD) + ] + upgraded = False + tail = b"" + + for msg, payload in messages or (): + self._request_count += 1 + self._messages.append((msg, payload)) + + waiter = self._waiter + if messages and waiter is not None and not waiter.done(): + # don't set result twice + waiter.set_result(None) + + self._upgrade = upgraded + if upgraded and 
tail: + self._message_tail = tail + + # no parser, just store + elif self._payload_parser is None and self._upgrade and data: + self._message_tail += data + + # feed payload + elif data: + eof, tail = self._payload_parser.feed_data(data) + if eof: + self.close() + + def keep_alive(self, val: bool) -> None: + """Set keep-alive connection mode. + + :param bool val: new state. + """ + self._keepalive = val + if self._keepalive_handle: + self._keepalive_handle.cancel() + self._keepalive_handle = None + + def close(self) -> None: + """Close connection. + + Stop accepting new pipelining messages and close + connection when handlers done processing messages. + """ + self._close = True + if self._waiter: + self._waiter.cancel() + + def force_close(self) -> None: + """Forcefully close connection.""" + self._force_close = True + if self._waiter: + self._waiter.cancel() + if self.transport is not None: + self.transport.close() + self.transport = None + + def log_access( + self, request: BaseRequest, response: StreamResponse, time: float + ) -> None: + if self.access_logger is not None: + self.access_logger.log(request, response, self._loop.time() - time) + + def log_debug(self, *args: Any, **kw: Any) -> None: + if self.debug: + self.logger.debug(*args, **kw) + + def log_exception(self, *args: Any, **kw: Any) -> None: + self.logger.exception(*args, **kw) + + def _process_keepalive(self) -> None: + if self._force_close or not self._keepalive: + return + + next = self._keepalive_time + self._keepalive_timeout + + # handler in idle state + if self._waiter: + if self._loop.time() > next: + self.force_close() + return + + # not all request handlers are done, + # reschedule itself to next second + self._keepalive_handle = self._loop.call_later( + self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive + ) + + async def _handle_request( + self, + request: BaseRequest, + start_time: float, + request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], + ) -> Tuple[StreamResponse, bool]: + assert self._request_handler is not None + try: + try: + self._current_request = request + resp = await request_handler(request) + finally: + self._current_request = None + except HTTPException as exc: + resp = exc + reset = await self.finish_response(request, resp, start_time) + except asyncio.CancelledError: + raise + except asyncio.TimeoutError as exc: + self.log_debug("Request handler timed out.", exc_info=exc) + resp = self.handle_error(request, 504) + reset = await self.finish_response(request, resp, start_time) + except Exception as exc: + resp = self.handle_error(request, 500, exc) + reset = await self.finish_response(request, resp, start_time) + else: + # Deprecation warning (See #2415) + if getattr(resp, "__http_exception__", False): + warnings.warn( + "returning HTTPException object is deprecated " + "(#2415) and will be removed, " + "please raise the exception instead", + DeprecationWarning, + ) + + reset = await self.finish_response(request, resp, start_time) + + return resp, reset + + async def start(self) -> None: + """Process incoming request. + + It reads request line, request headers and request payload, then + calls handle_request() method. Subclass has to override + handle_request(). start() handles various exceptions in request + or response handling. Connection is being closed always unless + keep_alive(True) specified. 
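+ One connection may carry many pipelined requests; the loop below pops
+ them from self._messages in arrival order and exits on close(),
+ force_close(), cancellation, or keep-alive timeout.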
+ """ + loop = self._loop + handler = self._task_handler + assert handler is not None + manager = self._manager + assert manager is not None + keepalive_timeout = self._keepalive_timeout + resp = None + assert self._request_factory is not None + assert self._request_handler is not None + + while not self._force_close: + if not self._messages: + try: + # wait for next request + self._waiter = loop.create_future() + await self._waiter + except asyncio.CancelledError: + break + finally: + self._waiter = None + + message, payload = self._messages.popleft() + + start = loop.time() + + manager.requests_count += 1 + writer = StreamWriter(self, loop) + if isinstance(message, _ErrInfo): + # make request_factory work + request_handler = self._make_error_handler(message) + message = ERROR + else: + request_handler = self._request_handler + + request = self._request_factory(message, payload, self, writer, handler) + try: + # a new task is used for copy context vars (#3406) + task = self._loop.create_task( + self._handle_request(request, start, request_handler) + ) + try: + resp, reset = await task + except (asyncio.CancelledError, ConnectionError): + self.log_debug("Ignored premature client disconnection") + break + + # Drop the processed task from asyncio.Task.all_tasks() early + del task + if reset: + self.log_debug("Ignored premature client disconnection 2") + break + + # notify server about keep-alive + self._keepalive = bool(resp.keep_alive) + + # check payload + if not payload.is_eof(): + lingering_time = self._lingering_time + if not self._force_close and lingering_time: + self.log_debug( + "Start lingering close timer for %s sec.", lingering_time + ) + + now = loop.time() + end_t = now + lingering_time + + with suppress(asyncio.TimeoutError, asyncio.CancelledError): + while not payload.is_eof() and now < end_t: + async with ceil_timeout(end_t - now): + # read and ignore + await payload.readany() + now = loop.time() + + # if payload still uncompleted + if not payload.is_eof() and not self._force_close: + self.log_debug("Uncompleted request.") + self.close() + + payload.set_exception(PayloadAccessError()) + + except asyncio.CancelledError: + self.log_debug("Ignored premature client disconnection ") + break + except RuntimeError as exc: + if self.debug: + self.log_exception("Unhandled runtime exception", exc_info=exc) + self.force_close() + except Exception as exc: + self.log_exception("Unhandled exception", exc_info=exc) + self.force_close() + finally: + if self.transport is None and resp is not None: + self.log_debug("Ignored premature client disconnection.") + elif not self._force_close: + if self._keepalive and not self._close: + # start keep-alive timer + if keepalive_timeout is not None: + now = self._loop.time() + self._keepalive_time = now + if self._keepalive_handle is None: + self._keepalive_handle = loop.call_at( + now + keepalive_timeout, self._process_keepalive + ) + else: + break + + # remove handler, close transport if no handlers left + if not self._force_close: + self._task_handler = None + if self.transport is not None: + self.transport.close() + + async def finish_response( + self, request: BaseRequest, resp: StreamResponse, start_time: float + ) -> bool: + """Prepare the response and write_eof, then log access. + + This has to + be called within the context of any exception so the access logger + can get exception information. Returns True if the client disconnects + prematurely. 
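+ A True return value makes start() treat the connection as reset and
+ stop processing further pipelined messages on it.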
+ """ + if self._request_parser is not None: + self._request_parser.set_upgraded(False) + self._upgrade = False + if self._message_tail: + self._request_parser.feed_data(self._message_tail) + self._message_tail = b"" + try: + prepare_meth = resp.prepare + except AttributeError: + if resp is None: + raise RuntimeError("Missing return " "statement on request handler") + else: + raise RuntimeError( + "Web-handler should return " + "a response instance, " + "got {!r}".format(resp) + ) + try: + await prepare_meth(request) + await resp.write_eof() + except ConnectionError: + self.log_access(request, resp, start_time) + return True + else: + self.log_access(request, resp, start_time) + return False + + def handle_error( + self, + request: BaseRequest, + status: int = 500, + exc: Optional[BaseException] = None, + message: Optional[str] = None, + ) -> StreamResponse: + """Handle errors. + + Returns HTTP response with specific status code. Logs additional + information. It always closes current connection. + """ + self.log_exception("Error handling request", exc_info=exc) + + # some data already got sent, connection is broken + if request.writer.output_size > 0: + raise ConnectionError( + "Response is sent already, cannot send another response " + "with the error message" + ) + + ct = "text/plain" + if status == HTTPStatus.INTERNAL_SERVER_ERROR: + title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR) + msg = HTTPStatus.INTERNAL_SERVER_ERROR.description + tb = None + if self.debug: + with suppress(Exception): + tb = traceback.format_exc() + + if "text/html" in request.headers.get("Accept", ""): + if tb: + tb = html_escape(tb) + msg = f"

<h2>Traceback:</h2>\n<pre>{tb}</pre>" + message = ( + "<html><head>" + "<title>{title}</title>" + "</head><body>\n<h1>{title}</h1>
" + "\n{msg}\n\n" + ).format(title=title, msg=msg) + ct = "text/html" + else: + if tb: + msg = tb + message = title + "\n\n" + msg + + resp = Response(status=status, text=message, content_type=ct) + resp.force_close() + + return resp + + def _make_error_handler( + self, err_info: _ErrInfo + ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]: + async def handler(request: BaseRequest) -> StreamResponse: + return self.handle_error( + request, err_info.status, err_info.exc, err_info.message + ) + + return handler diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_request.py b/venv/lib/python3.10/site-packages/aiohttp/web_request.py new file mode 100644 index 0000000..c02ebfc --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_request.py @@ -0,0 +1,882 @@ +import asyncio +import datetime +import io +import re +import socket +import string +import tempfile +import types +import warnings +from http.cookies import SimpleCookie +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterator, + Mapping, + MutableMapping, + Optional, + Pattern, + Tuple, + Union, + cast, +) +from urllib.parse import parse_qsl + +import attr +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy +from yarl import URL + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ( + DEBUG, + ETAG_ANY, + LIST_QUOTED_ETAG_RE, + ChainMapProxy, + ETag, + HeadersMixin, + parse_http_date, + reify, + sentinel, +) +from .http_parser import RawRequestMessage +from .http_writer import HttpVersion +from .multipart import BodyPartReader, MultipartReader +from .streams import EmptyStreamReader, StreamReader +from .typedefs import ( + DEFAULT_JSON_DECODER, + Final, + JSONDecoder, + LooseHeaders, + RawHeaders, + StrOrURL, +) +from .web_exceptions import HTTPRequestEntityTooLarge +from .web_response import StreamResponse + +__all__ = ("BaseRequest", "FileField", "Request") + + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application + from .web_protocol import RequestHandler + from .web_urldispatcher import UrlMappingMatchInfo + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class FileField: + name: str + filename: str + file: io.BufferedReader + content_type: str + headers: "CIMultiDictProxy[str]" + + +_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" +# '-' at the end to prevent interpretation as range in a char class + +_TOKEN: Final[str] = rf"[{_TCHAR}]+" + +_QDTEXT: Final[str] = r"[{}]".format( + r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F))) +) +# qdtext includes 0x5C to escape 0x5D ('\]') +# qdtext excludes obs-text (because obsoleted, and encoding not specified) + +_QUOTED_PAIR: Final[str] = r"\\[\t !-~]" + +_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format( + qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR +) + +_FORWARDED_PAIR: Final[ + str +] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( + token=_TOKEN, quoted_string=_QUOTED_STRING +) + +_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") +# same pattern as _QUOTED_PAIR but contains a capture group + +_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR) + +############################################################ +# HTTP Request +############################################################ + + +class BaseRequest(MutableMapping[str, Any], HeadersMixin): + + POST_METHODS = { + hdrs.METH_PATCH, + hdrs.METH_POST, + 
hdrs.METH_PUT, + hdrs.METH_TRACE, + hdrs.METH_DELETE, + } + + ATTRS = HeadersMixin.ATTRS | frozenset( + [ + "_message", + "_protocol", + "_payload_writer", + "_payload", + "_headers", + "_method", + "_version", + "_rel_url", + "_post", + "_read_bytes", + "_state", + "_cache", + "_task", + "_client_max_size", + "_loop", + "_transport_sslcontext", + "_transport_peername", + ] + ) + + def __init__( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: "RequestHandler", + payload_writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + loop: asyncio.AbstractEventLoop, + *, + client_max_size: int = 1024**2, + state: Optional[Dict[str, Any]] = None, + scheme: Optional[str] = None, + host: Optional[str] = None, + remote: Optional[str] = None, + ) -> None: + if state is None: + state = {} + self._message = message + self._protocol = protocol + self._payload_writer = payload_writer + + self._payload = payload + self._headers = message.headers + self._method = message.method + self._version = message.version + self._cache: Dict[str, Any] = {} + url = message.url + if url.is_absolute(): + # absolute URL is given, + # override auto-calculating url, host, and scheme + # all other properties should be good + self._cache["url"] = url + self._cache["host"] = url.host + self._cache["scheme"] = url.scheme + self._rel_url = url.relative() + else: + self._rel_url = message.url + self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None + self._read_bytes: Optional[bytes] = None + + self._state = state + self._task = task + self._client_max_size = client_max_size + self._loop = loop + + transport = self._protocol.transport + assert transport is not None + self._transport_sslcontext = transport.get_extra_info("sslcontext") + self._transport_peername = transport.get_extra_info("peername") + + if scheme is not None: + self._cache["scheme"] = scheme + if host is not None: + self._cache["host"] = host + if remote is not None: + self._cache["remote"] = remote + + def clone( + self, + *, + method: str = sentinel, + rel_url: StrOrURL = sentinel, + headers: LooseHeaders = sentinel, + scheme: str = sentinel, + host: str = sentinel, + remote: str = sentinel, + ) -> "BaseRequest": + """Clone itself with replacement some attributes. + + Creates and returns a new instance of Request object. If no parameters + are given, an exact copy is returned. If a parameter is not passed, it + will reuse the one from the current request object. 
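+ For example, normalize_path_middleware above probes alternative paths
+ with request.clone(rel_url=path).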
+ """ + if self._read_bytes: + raise RuntimeError("Cannot clone request " "after reading its content") + + dct: Dict[str, Any] = {} + if method is not sentinel: + dct["method"] = method + if rel_url is not sentinel: + new_url = URL(rel_url) + dct["url"] = new_url + dct["path"] = str(new_url) + if headers is not sentinel: + # a copy semantic + dct["headers"] = CIMultiDictProxy(CIMultiDict(headers)) + dct["raw_headers"] = tuple( + (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() + ) + + message = self._message._replace(**dct) + + kwargs = {} + if scheme is not sentinel: + kwargs["scheme"] = scheme + if host is not sentinel: + kwargs["host"] = host + if remote is not sentinel: + kwargs["remote"] = remote + + return self.__class__( + message, + self._payload, + self._protocol, + self._payload_writer, + self._task, + self._loop, + client_max_size=self._client_max_size, + state=self._state.copy(), + **kwargs, + ) + + @property + def task(self) -> "asyncio.Task[None]": + return self._task + + @property + def protocol(self) -> "RequestHandler": + return self._protocol + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def writer(self) -> AbstractStreamWriter: + return self._payload_writer + + @reify + def message(self) -> RawRequestMessage: + warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3) + return self._message + + @reify + def rel_url(self) -> URL: + return self._rel_url + + @reify + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn( + "request.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + # MutableMapping API + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + ######## + + @reify + def secure(self) -> bool: + """A bool indicating if the request is handled with SSL.""" + return self.scheme == "https" + + @reify + def forwarded(self) -> Tuple[Mapping[str, str], ...]: + """A tuple containing all parsed Forwarded header(s). + + Makes an effort to parse Forwarded headers as specified by RFC 7239: + + - It adds one (immutable) dictionary per Forwarded 'field-value', ie + per proxy. The element corresponds to the data in the Forwarded + field-value added by the first proxy encountered by the client. Each + subsequent item corresponds to those added by later proxies. + - It checks that every value has valid syntax in general as specified + in section 4: either a 'token' or a 'quoted-string'. + - It un-escapes found escape sequences. + - It does NOT validate 'by' and 'for' contents as specified in section + 6. + - It does NOT validate 'host' contents (Host ABNF). + - It does NOT validate 'proto' contents for valid URI scheme names. 
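+ For example, a header of
+ Forwarded: for=192.0.2.60;proto=http, for=198.51.100.17
+ parses to ({'for': '192.0.2.60', 'proto': 'http'},
+ {'for': '198.51.100.17'}).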
+ + Returns a tuple containing one or more immutable dicts + """ + elems = [] + for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): + length = len(field_value) + pos = 0 + need_separator = False + elem: Dict[str, str] = {} + elems.append(types.MappingProxyType(elem)) + while 0 <= pos < length: + match = _FORWARDED_PAIR_RE.match(field_value, pos) + if match is not None: # got a valid forwarded-pair + if need_separator: + # bad syntax here, skip to next comma + pos = field_value.find(",", pos) + else: + name, value, port = match.groups() + if value[0] == '"': + # quoted string: remove quotes and unescape + value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1]) + if port: + value += port + elem[name.lower()] = value + pos += len(match.group(0)) + need_separator = True + elif field_value[pos] == ",": # next forwarded-element + need_separator = False + elem = {} + elems.append(types.MappingProxyType(elem)) + pos += 1 + elif field_value[pos] == ";": # next forwarded-pair + need_separator = False + pos += 1 + elif field_value[pos] in " \t": + # Allow whitespace even between forwarded-pairs, though + # RFC 7239 doesn't. This simplifies code and is in line + # with Postel's law. + pos += 1 + else: + # bad syntax here, skip to next comma + pos = field_value.find(",", pos) + return tuple(elems) + + @reify + def scheme(self) -> str: + """A string representing the scheme of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(scheme=new_scheme) call. + - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise. + + 'http' or 'https'. + """ + if self._transport_sslcontext: + return "https" + else: + return "http" + + @reify + def method(self) -> str: + """Read only property for getting HTTP method. + + The value is upper-cased str like 'GET', 'POST', 'PUT' etc. + """ + return self._method + + @reify + def version(self) -> HttpVersion: + """Read only property for getting HTTP version of request. + + Returns aiohttp.protocol.HttpVersion instance. + """ + return self._version + + @reify + def host(self) -> str: + """Hostname of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(host=new_host) call. + - HOST HTTP header + - socket.getfqdn() value + """ + host = self._message.headers.get(hdrs.HOST) + if host is not None: + return host + return socket.getfqdn() + + @reify + def remote(self) -> Optional[str]: + """Remote IP of client initiated HTTP request. + + The IP is resolved in this order: + + - overridden value by .clone(remote=new_remote) call. + - peername of opened socket + """ + if self._transport_peername is None: + return None + if isinstance(self._transport_peername, (list, tuple)): + return str(self._transport_peername[0]) + return str(self._transport_peername) + + @reify + def url(self) -> URL: + url = URL.build(scheme=self.scheme, host=self.host) + return url.join(self._rel_url) + + @reify + def path(self) -> str: + """The URL including *PATH INFO* without the host or scheme. + + E.g., ``/app/blog`` + """ + return self._rel_url.path + + @reify + def path_qs(self) -> str: + """The URL including PATH_INFO and the query string. + + E.g, /app/blog?id=10 + """ + return str(self._rel_url) + + @reify + def raw_path(self) -> str: + """The URL including raw *PATH INFO* without the host or scheme. 
+ + Warning, the path is unquoted and may contains non valid URL characters + + E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters`` + """ + return self._message.path + + @reify + def query(self) -> "MultiDictProxy[str]": + """A multidict with all the variables in the query string.""" + return MultiDictProxy(self._rel_url.query) + + @reify + def query_string(self) -> str: + """The query string in the URL. + + E.g., id=10 + """ + return self._rel_url.query_string + + @reify + def headers(self) -> "CIMultiDictProxy[str]": + """A case-insensitive multidict proxy with all headers.""" + return self._headers + + @reify + def raw_headers(self) -> RawHeaders: + """A sequence of pairs for all headers.""" + return self._message.raw_headers + + @reify + def if_modified_since(self) -> Optional[datetime.datetime]: + """The value of If-Modified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE)) + + @reify + def if_unmodified_since(self) -> Optional[datetime.datetime]: + """The value of If-Unmodified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE)) + + @staticmethod + def _etag_values(etag_header: str) -> Iterator[ETag]: + """Extract `ETag` objects from raw header.""" + if etag_header == ETAG_ANY: + yield ETag( + is_weak=False, + value=ETAG_ANY, + ) + else: + for match in LIST_QUOTED_ETAG_RE.finditer(etag_header): + is_weak, value, garbage = match.group(2, 3, 4) + # Any symbol captured by 4th group means + # that the following sequence is invalid. + if garbage: + break + + yield ETag( + is_weak=bool(is_weak), + value=value, + ) + + @classmethod + def _if_match_or_none_impl( + cls, header_value: Optional[str] + ) -> Optional[Tuple[ETag, ...]]: + if not header_value: + return None + + return tuple(cls._etag_values(header_value)) + + @reify + def if_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. + """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH)) + + @reify + def if_none_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-None-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. + """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH)) + + @reify + def if_range(self) -> Optional[datetime.datetime]: + """The value of If-Range HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_RANGE)) + + @reify + def keep_alive(self) -> bool: + """Is keepalive enabled by client?""" + return not self._message.should_close + + @reify + def cookies(self) -> Mapping[str, str]: + """Return request cookies. + + A read-only dictionary-like object. + """ + raw = self.headers.get(hdrs.COOKIE, "") + parsed: SimpleCookie[str] = SimpleCookie(raw) + return MappingProxyType({key: val.value for key, val in parsed.items()}) + + @reify + def http_range(self) -> slice: + """The content of Range HTTP header. + + Return a slice instance. 
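+ For example, 'Range: bytes=0-499' yields slice(0, 500, 1) (the header
+ end-byte is inclusive, the slice stop exclusive), while
+ 'Range: bytes=-500' yields slice(-500, None, 1).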
+ + """ + rng = self._headers.get(hdrs.RANGE) + start, end = None, None + if rng is not None: + try: + pattern = r"^bytes=(\d*)-(\d*)$" + start, end = re.findall(pattern, rng)[0] + except IndexError: # pattern was not found in header + raise ValueError("range not in acceptable format") + + end = int(end) if end else None + start = int(start) if start else None + + if start is None and end is not None: + # end with no start is to return tail of content + start = -end + end = None + + if start is not None and end is not None: + # end is inclusive in range header, exclusive for slice + end += 1 + + if start >= end: + raise ValueError("start cannot be after end") + + if start is end is None: # No valid range supplied + raise ValueError("No start or end of range specified") + + return slice(start, end, 1) + + @reify + def content(self) -> StreamReader: + """Return raw payload stream.""" + return self._payload + + @property + def has_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + warnings.warn( + "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2 + ) + return not self._payload.at_eof() + + @property + def can_read_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + return not self._payload.at_eof() + + @reify + def body_exists(self) -> bool: + """Return True if request has HTTP BODY, False otherwise.""" + return type(self._payload) is not EmptyStreamReader + + async def release(self) -> None: + """Release request. + + Eat unread part of HTTP BODY if present. + """ + while not self._payload.at_eof(): + await self._payload.readany() + + async def read(self) -> bytes: + """Read request body if present. + + Returns bytes object with full request content. 
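+ Raises HTTPRequestEntityTooLarge once the accumulated body size
+ reaches client_max_size (1 MiB by default).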
+ """ + if self._read_bytes is None: + body = bytearray() + while True: + chunk = await self._payload.readany() + body.extend(chunk) + if self._client_max_size: + body_size = len(body) + if body_size >= self._client_max_size: + raise HTTPRequestEntityTooLarge( + max_size=self._client_max_size, actual_size=body_size + ) + if not chunk: + break + self._read_bytes = bytes(body) + return self._read_bytes + + async def text(self) -> str: + """Return BODY as text using encoding from .charset.""" + bytes_body = await self.read() + encoding = self.charset or "utf-8" + return bytes_body.decode(encoding) + + async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any: + """Return BODY as JSON.""" + body = await self.text() + return loads(body) + + async def multipart(self) -> MultipartReader: + """Return async iterator to process BODY as multipart.""" + return MultipartReader(self._headers, self._payload) + + async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": + """Return POST parameters.""" + if self._post is not None: + return self._post + if self._method not in self.POST_METHODS: + self._post = MultiDictProxy(MultiDict()) + return self._post + + content_type = self.content_type + if content_type not in ( + "", + "application/x-www-form-urlencoded", + "multipart/form-data", + ): + self._post = MultiDictProxy(MultiDict()) + return self._post + + out: MultiDict[Union[str, bytes, FileField]] = MultiDict() + + if content_type == "multipart/form-data": + multipart = await self.multipart() + max_size = self._client_max_size + + field = await multipart.next() + while field is not None: + size = 0 + field_ct = field.headers.get(hdrs.CONTENT_TYPE) + + if isinstance(field, BodyPartReader): + assert field.name is not None + + # Note that according to RFC 7578, the Content-Type header + # is optional, even for files, so we can't assume it's + # present. 
+ # https://tools.ietf.org/html/rfc7578#section-4.4 + if field.filename: + # store file in temp file + tmp = tempfile.TemporaryFile() + chunk = await field.read_chunk(size=2**16) + while chunk: + chunk = field.decode(chunk) + tmp.write(chunk) + size += len(chunk) + if 0 < max_size < size: + tmp.close() + raise HTTPRequestEntityTooLarge( + max_size=max_size, actual_size=size + ) + chunk = await field.read_chunk(size=2**16) + tmp.seek(0) + + if field_ct is None: + field_ct = "application/octet-stream" + + ff = FileField( + field.name, + field.filename, + cast(io.BufferedReader, tmp), + field_ct, + field.headers, + ) + out.add(field.name, ff) + else: + # deal with ordinary data + value = await field.read(decode=True) + if field_ct is None or field_ct.startswith("text/"): + charset = field.get_charset(default="utf-8") + out.add(field.name, value.decode(charset)) + else: + out.add(field.name, value) + size += len(value) + if 0 < max_size < size: + raise HTTPRequestEntityTooLarge( + max_size=max_size, actual_size=size + ) + else: + raise ValueError( + "To decode nested multipart you need " "to use custom reader", + ) + + field = await multipart.next() + else: + data = await self.read() + if data: + charset = self.charset or "utf-8" + out.extend( + parse_qsl( + data.rstrip().decode(charset), + keep_blank_values=True, + encoding=charset, + ) + ) + + self._post = MultiDictProxy(out) + return self._post + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Extra info from protocol transport""" + protocol = self._protocol + if protocol is None: + return default + + transport = protocol.transport + if transport is None: + return default + + return transport.get_extra_info(name, default) + + def __repr__(self) -> str: + ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode( + "ascii" + ) + return "<{} {} {} >".format( + self.__class__.__name__, self._method, ascii_encodable_path + ) + + def __eq__(self, other: object) -> bool: + return id(self) == id(other) + + def __bool__(self) -> bool: + return True + + async def _prepare_hook(self, response: StreamResponse) -> None: + return + + def _cancel(self, exc: BaseException) -> None: + self._payload.set_exception(exc) + + +class Request(BaseRequest): + + ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"]) + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + # matchdict, route_name, handler + # or information about traversal lookup + + # initialized after route resolving + self._match_info: Optional[UrlMappingMatchInfo] = None + + if DEBUG: + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom {}.{} attribute " + "is discouraged".format(self.__class__.__name__, name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + def clone( + self, + *, + method: str = sentinel, + rel_url: StrOrURL = sentinel, + headers: LooseHeaders = sentinel, + scheme: str = sentinel, + host: str = sentinel, + remote: str = sentinel, + ) -> "Request": + ret = super().clone( + method=method, + rel_url=rel_url, + headers=headers, + scheme=scheme, + host=host, + remote=remote, + ) + new_ret = cast(Request, ret) + new_ret._match_info = self._match_info + return new_ret + + @reify + def match_info(self) -> "UrlMappingMatchInfo": + """Result of route resolving.""" + match_info = self._match_info + assert match_info is not None + return match_info + + @property + def app(self) -> "Application": + 
"""Application instance.""" + match_info = self._match_info + assert match_info is not None + return match_info.current_app + + @property + def config_dict(self) -> ChainMapProxy: + match_info = self._match_info + assert match_info is not None + lst = match_info.apps + app = self.app + idx = lst.index(app) + sublist = list(reversed(lst[: idx + 1])) + return ChainMapProxy(sublist) + + async def _prepare_hook(self, response: StreamResponse) -> None: + match_info = self._match_info + if match_info is None: + return + for app in match_info._apps: + await app.on_response_prepare.send(self, response) diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_response.py b/venv/lib/python3.10/site-packages/aiohttp/web_response.py new file mode 100644 index 0000000..ce07f81 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_response.py @@ -0,0 +1,825 @@ +import asyncio +import collections.abc +import datetime +import enum +import json +import math +import time +import warnings +import zlib +from concurrent.futures import Executor +from http.cookies import Morsel, SimpleCookie +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterator, + Mapping, + MutableMapping, + Optional, + Tuple, + Union, + cast, +) + +from multidict import CIMultiDict, istr + +from . import hdrs, payload +from .abc import AbstractStreamWriter +from .helpers import ( + ETAG_ANY, + PY_38, + QUOTED_ETAG_RE, + ETag, + HeadersMixin, + parse_http_date, + rfc822_formatted_time, + sentinel, + validate_etag_value, +) +from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11 +from .payload import Payload +from .typedefs import JSONEncoder, LooseHeaders + +__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") + + +if TYPE_CHECKING: # pragma: no cover + from .web_request import BaseRequest + + BaseClass = MutableMapping[str, Any] +else: + BaseClass = collections.abc.MutableMapping + + +if not PY_38: + # allow samesite to be used in python < 3.8 + # already permitted in python 3.8, see https://bugs.python.org/issue29613 + Morsel._reserved["samesite"] = "SameSite" # type: ignore[attr-defined] + + +class ContentCoding(enum.Enum): + # The content codings that we have support for. 
+ # + # Additional registered codings are listed at: + # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding + deflate = "deflate" + gzip = "gzip" + identity = "identity" + + +############################################################ +# HTTP Response classes +############################################################ + + +class StreamResponse(BaseClass, HeadersMixin): + + _length_check = True + + def __init__( + self, + *, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + ) -> None: + self._body = None + self._keep_alive: Optional[bool] = None + self._chunked = False + self._compression = False + self._compression_force: Optional[ContentCoding] = None + self._cookies: SimpleCookie[str] = SimpleCookie() + + self._req: Optional[BaseRequest] = None + self._payload_writer: Optional[AbstractStreamWriter] = None + self._eof_sent = False + self._body_length = 0 + self._state: Dict[str, Any] = {} + + if headers is not None: + self._headers: CIMultiDict[str] = CIMultiDict(headers) + else: + self._headers = CIMultiDict() + + self.set_status(status, reason) + + @property + def prepared(self) -> bool: + return self._payload_writer is not None + + @property + def task(self) -> "Optional[asyncio.Task[None]]": + if self._req: + return self._req.task + else: + return None + + @property + def status(self) -> int: + return self._status + + @property + def chunked(self) -> bool: + return self._chunked + + @property + def compression(self) -> bool: + return self._compression + + @property + def reason(self) -> str: + return self._reason + + def set_status( + self, + status: int, + reason: Optional[str] = None, + _RESPONSES: Mapping[int, Tuple[str, str]] = RESPONSES, + ) -> None: + assert not self.prepared, ( + "Cannot change the response status code after " "the headers have been sent" + ) + self._status = int(status) + if reason is None: + try: + reason = _RESPONSES[self._status][0] + except Exception: + reason = "" + self._reason = reason + + @property + def keep_alive(self) -> Optional[bool]: + return self._keep_alive + + def force_close(self) -> None: + self._keep_alive = False + + @property + def body_length(self) -> int: + return self._body_length + + @property + def output_length(self) -> int: + warnings.warn("output_length is deprecated", DeprecationWarning) + assert self._payload_writer + return self._payload_writer.buffer_size + + def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: + """Enables automatic chunked transfer encoding.""" + self._chunked = True + + if hdrs.CONTENT_LENGTH in self._headers: + raise RuntimeError( + "You can't enable chunked encoding when " "a content length is set" + ) + if chunk_size is not None: + warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + + def enable_compression( + self, force: Optional[Union[bool, ContentCoding]] = None + ) -> None: + """Enables response compression encoding.""" + # Backwards compatibility for when force was a bool <0.17. 
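# Editorial note (not in the original source): the branch below maps the
# legacy boolean API onto the enum, True becomes ContentCoding.deflate and
# False becomes ContentCoding.identity, before the deprecation warning fires.
# Calling enable_compression() with no argument instead defers the choice to
# Accept-Encoding negotiation in _start_compression further down.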
+ if type(force) == bool: + force = ContentCoding.deflate if force else ContentCoding.identity + warnings.warn( + "Using boolean for force is deprecated #3318", DeprecationWarning + ) + elif force is not None: + assert isinstance(force, ContentCoding), ( + "force should one of " "None, bool or " "ContentEncoding" + ) + + self._compression = True + self._compression_force = force + + @property + def headers(self) -> "CIMultiDict[str]": + return self._headers + + @property + def cookies(self) -> "SimpleCookie[str]": + return self._cookies + + def set_cookie( + self, + name: str, + value: str, + *, + expires: Optional[str] = None, + domain: Optional[str] = None, + max_age: Optional[Union[int, str]] = None, + path: str = "/", + secure: Optional[bool] = None, + httponly: Optional[bool] = None, + version: Optional[str] = None, + samesite: Optional[str] = None, + ) -> None: + """Set or update response cookie. + + Sets new cookie or updates existent with new value. + Also updates only those params which are not None. + """ + old = self._cookies.get(name) + if old is not None and old.coded_value == "": + # deleted cookie + self._cookies.pop(name, None) + + self._cookies[name] = value + c = self._cookies[name] + + if expires is not None: + c["expires"] = expires + elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT": + del c["expires"] + + if domain is not None: + c["domain"] = domain + + if max_age is not None: + c["max-age"] = str(max_age) + elif "max-age" in c: + del c["max-age"] + + c["path"] = path + + if secure is not None: + c["secure"] = secure + if httponly is not None: + c["httponly"] = httponly + if version is not None: + c["version"] = version + if samesite is not None: + c["samesite"] = samesite + + def del_cookie( + self, name: str, *, domain: Optional[str] = None, path: str = "/" + ) -> None: + """Delete cookie. + + Creates new empty expired cookie. + """ + # TODO: do we need domain/path here? 
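# Editorial note (not in the original source): the TODO above can be answered
# "yes". Browsers key cookies by (name, domain, path), so a deletion must
# repeat the domain/path used when the cookie was set, e.g.:
#
#     resp.set_cookie("session", "tok", path="/app")   # illustrative names
#     resp.del_cookie("session", path="/app")          # same path required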
+ self._cookies.pop(name, None) + self.set_cookie( + name, + "", + max_age=0, + expires="Thu, 01 Jan 1970 00:00:00 GMT", + domain=domain, + path=path, + ) + + @property + def content_length(self) -> Optional[int]: + # Just a placeholder for adding setter + return super().content_length + + @content_length.setter + def content_length(self, value: Optional[int]) -> None: + if value is not None: + value = int(value) + if self._chunked: + raise RuntimeError( + "You can't set content length when " "chunked encoding is enable" + ) + self._headers[hdrs.CONTENT_LENGTH] = str(value) + else: + self._headers.pop(hdrs.CONTENT_LENGTH, None) + + @property + def content_type(self) -> str: + # Just a placeholder for adding setter + return super().content_type + + @content_type.setter + def content_type(self, value: str) -> None: + self.content_type # read header values if needed + self._content_type = str(value) + self._generate_content_type_header() + + @property + def charset(self) -> Optional[str]: + # Just a placeholder for adding setter + return super().charset + + @charset.setter + def charset(self, value: Optional[str]) -> None: + ctype = self.content_type # read header values if needed + if ctype == "application/octet-stream": + raise RuntimeError( + "Setting charset for application/octet-stream " + "doesn't make sense, setup content_type first" + ) + assert self._content_dict is not None + if value is None: + self._content_dict.pop("charset", None) + else: + self._content_dict["charset"] = str(value).lower() + self._generate_content_type_header() + + @property + def last_modified(self) -> Optional[datetime.datetime]: + """The value of Last-Modified HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED)) + + @last_modified.setter + def last_modified( + self, value: Optional[Union[int, float, datetime.datetime, str]] + ) -> None: + if value is None: + self._headers.pop(hdrs.LAST_MODIFIED, None) + elif isinstance(value, (int, float)): + self._headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)) + ) + elif isinstance(value, datetime.datetime): + self._headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple() + ) + elif isinstance(value, str): + self._headers[hdrs.LAST_MODIFIED] = value + + @property + def etag(self) -> Optional[ETag]: + quoted_value = self._headers.get(hdrs.ETAG) + if not quoted_value: + return None + elif quoted_value == ETAG_ANY: + return ETag(value=ETAG_ANY) + match = QUOTED_ETAG_RE.fullmatch(quoted_value) + if not match: + return None + is_weak, value = match.group(1, 2) + return ETag( + is_weak=bool(is_weak), + value=value, + ) + + @etag.setter + def etag(self, value: Optional[Union[ETag, str]]) -> None: + if value is None: + self._headers.pop(hdrs.ETAG, None) + elif (isinstance(value, str) and value == ETAG_ANY) or ( + isinstance(value, ETag) and value.value == ETAG_ANY + ): + self._headers[hdrs.ETAG] = ETAG_ANY + elif isinstance(value, str): + validate_etag_value(value) + self._headers[hdrs.ETAG] = f'"{value}"' + elif isinstance(value, ETag) and isinstance(value.value, str): + validate_etag_value(value.value) + hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"' + self._headers[hdrs.ETAG] = hdr_value + else: + raise ValueError( + f"Unsupported etag type: {type(value)}. 
" + f"etag must be str, ETag or None" + ) + + def _generate_content_type_header( + self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE + ) -> None: + assert self._content_dict is not None + assert self._content_type is not None + params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items()) + if params: + ctype = self._content_type + "; " + params + else: + ctype = self._content_type + self._headers[CONTENT_TYPE] = ctype + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if coding != ContentCoding.identity: + assert self._payload_writer is not None + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._payload_writer.enable_compression(coding.value) + # Compressed payload may have different content length, + # remove the header + self._headers.popall(hdrs.CONTENT_LENGTH, None) + + async def _start_compression(self, request: "BaseRequest") -> None: + if self._compression_force: + await self._do_start_compression(self._compression_force) + else: + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + for coding in ContentCoding: + if coding.value in accept_encoding: + await self._do_start_compression(coding) + return + + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + if self._eof_sent: + return None + if self._payload_writer is not None: + return self._payload_writer + + return await self._start(request) + + async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: + self._req = request + writer = self._payload_writer = request._payload_writer + + await self._prepare_headers() + await request._prepare_hook(self) + await self._write_headers() + + return writer + + async def _prepare_headers(self) -> None: + request = self._req + assert request is not None + writer = self._payload_writer + assert writer is not None + keep_alive = self._keep_alive + if keep_alive is None: + keep_alive = request.keep_alive + self._keep_alive = keep_alive + + version = request.version + + headers = self._headers + for cookie in self._cookies.values(): + value = cookie.output(header="")[1:] + headers.add(hdrs.SET_COOKIE, value) + + if self._compression: + await self._start_compression(request) + + if self._chunked: + if version != HttpVersion11: + raise RuntimeError( + "Using chunked encoding is forbidden " + "for HTTP/{0.major}.{0.minor}".format(request.version) + ) + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = "chunked" + if hdrs.CONTENT_LENGTH in headers: + del headers[hdrs.CONTENT_LENGTH] + elif self._length_check: + writer.length = self.content_length + if writer.length is None: + if version >= HttpVersion11 and self.status != 204: + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = "chunked" + if hdrs.CONTENT_LENGTH in headers: + del headers[hdrs.CONTENT_LENGTH] + else: + keep_alive = False + # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2 + # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4 + elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204): + del headers[hdrs.CONTENT_LENGTH] + + if self.status not in (204, 304): + headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") + headers.setdefault(hdrs.DATE, rfc822_formatted_time()) + headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) + + # connection header + if hdrs.CONNECTION not in headers: + if keep_alive: + if version == HttpVersion10: + headers[hdrs.CONNECTION] = "keep-alive" + else: + if version == HttpVersion11: + headers[hdrs.CONNECTION] = "close" + + async def 
_write_headers(self) -> None: + request = self._req + assert request is not None + writer = self._payload_writer + assert writer is not None + # status line + version = request.version + status_line = "HTTP/{}.{} {} {}".format( + version[0], version[1], self._status, self._reason + ) + await writer.write_headers(status_line, self._headers) + + async def write(self, data: bytes) -> None: + assert isinstance( + data, (bytes, bytearray, memoryview) + ), "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + raise RuntimeError("Cannot call write() after write_eof()") + if self._payload_writer is None: + raise RuntimeError("Cannot call write() before prepare()") + + await self._payload_writer.write(data) + + async def drain(self) -> None: + assert not self._eof_sent, "EOF has already been sent" + assert self._payload_writer is not None, "Response has not been started" + warnings.warn( + "drain method is deprecated, use await resp.write()", + DeprecationWarning, + stacklevel=2, + ) + await self._payload_writer.drain() + + async def write_eof(self, data: bytes = b"") -> None: + assert isinstance( + data, (bytes, bytearray, memoryview) + ), "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + return + + assert self._payload_writer is not None, "Response has not been started" + + await self._payload_writer.write_eof(data) + self._eof_sent = True + self._req = None + self._body_length = self._payload_writer.output_size + self._payload_writer = None + + def __repr__(self) -> str: + if self._eof_sent: + info = "eof" + elif self.prepared: + assert self._req is not None + info = f"{self._req.method} {self._req.path} " + else: + info = "not prepared" + return f"<{self.__class__.__name__} {self.reason} {info}>" + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + def __hash__(self) -> int: + return hash(id(self)) + + def __eq__(self, other: object) -> bool: + return self is other + + +class Response(StreamResponse): + def __init__( + self, + *, + body: Any = None, + status: int = 200, + reason: Optional[str] = None, + text: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + content_type: Optional[str] = None, + charset: Optional[str] = None, + zlib_executor_size: Optional[int] = None, + zlib_executor: Optional[Executor] = None, + ) -> None: + if body is not None and text is not None: + raise ValueError("body and text are not allowed together") + + if headers is None: + real_headers: CIMultiDict[str] = CIMultiDict() + elif not isinstance(headers, CIMultiDict): + real_headers = CIMultiDict(headers) + else: + real_headers = headers # = cast('CIMultiDict[str]', headers) + + if content_type is not None and "charset" in content_type: + raise ValueError("charset must not be in content_type " "argument") + + if text is not None: + if hdrs.CONTENT_TYPE in real_headers: + if content_type or charset: + raise ValueError( + "passing both Content-Type header and " + "content_type or charset params " + "is forbidden" + ) + else: + # fast path for filling headers + if not isinstance(text, str): + raise TypeError("text argument must be str (%r)" % type(text)) + if content_type is None: + content_type = "text/plain" + if charset is None: + charset = "utf-8" + 
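# Editorial note (not in the original source): this fast path assembles the
# Content-Type header and encodes the text once, so the slower `self.text`
# setter further down is bypassed for the common str-body case.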
real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset + body = text.encode(charset) + text = None + else: + if hdrs.CONTENT_TYPE in real_headers: + if content_type is not None or charset is not None: + raise ValueError( + "passing both Content-Type header and " + "content_type or charset params " + "is forbidden" + ) + else: + if content_type is not None: + if charset is not None: + content_type += "; charset=" + charset + real_headers[hdrs.CONTENT_TYPE] = content_type + + super().__init__(status=status, reason=reason, headers=real_headers) + + if text is not None: + self.text = text + else: + self.body = body + + self._compressed_body: Optional[bytes] = None + self._zlib_executor_size = zlib_executor_size + self._zlib_executor = zlib_executor + + @property + def body(self) -> Optional[Union[bytes, Payload]]: + return self._body + + @body.setter + def body( + self, + body: bytes, + CONTENT_TYPE: istr = hdrs.CONTENT_TYPE, + CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, + ) -> None: + if body is None: + self._body: Optional[bytes] = None + self._body_payload: bool = False + elif isinstance(body, (bytes, bytearray)): + self._body = body + self._body_payload = False + else: + try: + self._body = body = payload.PAYLOAD_REGISTRY.get(body) + except payload.LookupError: + raise ValueError("Unsupported body type %r" % type(body)) + + self._body_payload = True + + headers = self._headers + + # set content-length header if needed + if not self._chunked and CONTENT_LENGTH not in headers: + size = body.size + if size is not None: + headers[CONTENT_LENGTH] = str(size) + + # set content-type + if CONTENT_TYPE not in headers: + headers[CONTENT_TYPE] = body.content_type + + # copy payload headers + if body.headers: + for (key, value) in body.headers.items(): + if key not in headers: + headers[key] = value + + self._compressed_body = None + + @property + def text(self) -> Optional[str]: + if self._body is None: + return None + return self._body.decode(self.charset or "utf-8") + + @text.setter + def text(self, text: str) -> None: + assert text is None or isinstance( + text, str + ), "text argument must be str (%r)" % type(text) + + if self.content_type == "application/octet-stream": + self.content_type = "text/plain" + if self.charset is None: + self.charset = "utf-8" + + self._body = text.encode(self.charset) + self._body_payload = False + self._compressed_body = None + + @property + def content_length(self) -> Optional[int]: + if self._chunked: + return None + + if hdrs.CONTENT_LENGTH in self._headers: + return super().content_length + + if self._compressed_body is not None: + # Return length of the compressed body + return len(self._compressed_body) + elif self._body_payload: + # A payload without content length, or a compressed payload + return None + elif self._body is not None: + return len(self._body) + else: + return 0 + + @content_length.setter + def content_length(self, value: Optional[int]) -> None: + raise RuntimeError("Content length is set automatically") + + async def write_eof(self, data: bytes = b"") -> None: + if self._eof_sent: + return + if self._compressed_body is None: + body: Optional[Union[bytes, Payload]] = self._body + else: + body = self._compressed_body + assert not data, f"data arg is not supported, got {data!r}" + assert self._req is not None + assert self._payload_writer is not None + if body is not None: + if self._req._method == hdrs.METH_HEAD or self._status in [204, 304]: + await super().write_eof() + elif self._body_payload: + payload = cast(Payload, body) 
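# Editorial note (not in the original source): Payload bodies stream
# themselves into the writer here, while plain bytes bodies instead take
# the write_eof(body) branch below.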
+ await payload.write(self._payload_writer) + await super().write_eof() + else: + await super().write_eof(cast(bytes, body)) + else: + await super().write_eof() + + async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: + if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: + if not self._body_payload: + if self._body is not None: + self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body)) + else: + self._headers[hdrs.CONTENT_LENGTH] = "0" + + return await super()._start(request) + + def _compress_body(self, zlib_mode: int) -> None: + assert zlib_mode > 0 + compressobj = zlib.compressobj(wbits=zlib_mode) + body_in = self._body + assert body_in is not None + self._compressed_body = compressobj.compress(body_in) + compressobj.flush() + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if self._body_payload or self._chunked: + return await super()._do_start_compression(coding) + + if coding != ContentCoding.identity: + # Instead of using _payload_writer.enable_compression, + # compress the whole body + zlib_mode = ( + 16 + zlib.MAX_WBITS if coding == ContentCoding.gzip else zlib.MAX_WBITS + ) + body_in = self._body + assert body_in is not None + if ( + self._zlib_executor_size is not None + and len(body_in) > self._zlib_executor_size + ): + await asyncio.get_event_loop().run_in_executor( + self._zlib_executor, self._compress_body, zlib_mode + ) + else: + self._compress_body(zlib_mode) + + body_out = self._compressed_body + assert body_out is not None + + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out)) + + +def json_response( + data: Any = sentinel, + *, + text: Optional[str] = None, + body: Optional[bytes] = None, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + content_type: str = "application/json", + dumps: JSONEncoder = json.dumps, +) -> Response: + if data is not sentinel: + if text or body: + raise ValueError("only one of data, text, or body should be specified") + else: + text = dumps(data) + return Response( + text=text, + body=body, + status=status, + reason=reason, + headers=headers, + content_type=content_type, + ) diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_routedef.py b/venv/lib/python3.10/site-packages/aiohttp/web_routedef.py new file mode 100644 index 0000000..a1eb0a7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_routedef.py @@ -0,0 +1,216 @@ +import abc +import os # noqa +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterator, + List, + Optional, + Sequence, + Type, + Union, + overload, +) + +import attr + +from . 
import hdrs
+from .abc import AbstractView
+from .typedefs import Handler, PathLike
+
+if TYPE_CHECKING:  # pragma: no cover
+    from .web_request import Request
+    from .web_response import StreamResponse
+    from .web_urldispatcher import AbstractRoute, UrlDispatcher
+else:
+    Request = StreamResponse = UrlDispatcher = AbstractRoute = None
+
+
+__all__ = (
+    "AbstractRouteDef",
+    "RouteDef",
+    "StaticDef",
+    "RouteTableDef",
+    "head",
+    "options",
+    "get",
+    "post",
+    "patch",
+    "put",
+    "delete",
+    "route",
+    "view",
+    "static",
+)
+
+
+class AbstractRouteDef(abc.ABC):
+    @abc.abstractmethod
+    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
+        pass  # pragma: no cover
+
+
+_HandlerType = Union[Type[AbstractView], Handler]
+
+
+@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
+class RouteDef(AbstractRouteDef):
+    method: str
+    path: str
+    handler: _HandlerType
+    kwargs: Dict[str, Any]
+
+    def __repr__(self) -> str:
+        info = []
+        for name, value in sorted(self.kwargs.items()):
+            info.append(f", {name}={value!r}")
+        return "<RouteDef {method} {path} -> {handler.__name__!r}" "{info}>".format(
+            method=self.method, path=self.path, handler=self.handler, info="".join(info)
+        )
+
+    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
+        if self.method in hdrs.METH_ALL:
+            reg = getattr(router, "add_" + self.method.lower())
+            return [reg(self.path, self.handler, **self.kwargs)]
+        else:
+            return [
+                router.add_route(self.method, self.path, self.handler, **self.kwargs)
+            ]
+
+
+@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
+class StaticDef(AbstractRouteDef):
+    prefix: str
+    path: PathLike
+    kwargs: Dict[str, Any]
+
+    def __repr__(self) -> str:
+        info = []
+        for name, value in sorted(self.kwargs.items()):
+            info.append(f", {name}={value!r}")
+        return "<StaticDef {prefix} -> {path}" "{info}>".format(
+            prefix=self.prefix, path=self.path, info="".join(info)
+        )
+
+    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
+        resource = router.add_static(self.prefix, self.path, **self.kwargs)
+        routes = resource.get_info().get("routes", {})
+        return list(routes.values())
+
+
+def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return RouteDef(method, path, handler, kwargs)
+
+
+def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_HEAD, path, handler, **kwargs)
+
+
+def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_OPTIONS, path, handler, **kwargs)
+
+
+def get(
+    path: str,
+    handler: _HandlerType,
+    *,
+    name: Optional[str] = None,
+    allow_head: bool = True,
+    **kwargs: Any,
+) -> RouteDef:
+    return route(
+        hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
+    )
+
+
+def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_POST, path, handler, **kwargs)
+
+
+def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_PUT, path, handler, **kwargs)
+
+
+def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_PATCH, path, handler, **kwargs)
+
+
+def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_DELETE, path, handler, **kwargs)
+
+
+def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_ANY, path, handler, **kwargs)
+
+
+def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
+    return StaticDef(prefix, path, kwargs)
+
+
+_Deco = Callable[[_HandlerType], _HandlerType]
+
+
+class RouteTableDef(Sequence[AbstractRouteDef]):
+    """Route definition table"""
+
+    def __init__(self) -> None:
+        self._items: List[AbstractRouteDef] = []
+
+    def __repr__(self) -> str:
+        return f"<RouteTableDef count={len(self._items)}>"
+
+    @overload
+    def __getitem__(self, index: int) -> AbstractRouteDef:
+        ...
+
+    @overload
+    def __getitem__(self, index: slice) -> List[AbstractRouteDef]:
+        ...
+
+    def __getitem__(self, index):  # type: ignore[no-untyped-def]
+        return self._items[index]
+
+    def __iter__(self) -> Iterator[AbstractRouteDef]:
+        return iter(self._items)
+
+    def __len__(self) -> int:
+        return len(self._items)
+
+    def __contains__(self, item: object) -> bool:
+        return item in self._items
+
+    def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
+        def inner(handler: _HandlerType) -> _HandlerType:
+            self._items.append(RouteDef(method, path, handler, kwargs))
+            return handler
+
+        return inner
+
+    def head(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_HEAD, path, **kwargs)
+
+    def get(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_GET, path, **kwargs)
+
+    def post(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_POST, path, **kwargs)
+
+    def put(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_PUT, path, **kwargs)
+
+    def patch(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_PATCH, path, **kwargs)
+
+    def delete(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_DELETE, path, **kwargs)
+
+    def options(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_OPTIONS, path, **kwargs)
+
+    def view(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_ANY, path, **kwargs)
+
+    def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
+        self._items.append(StaticDef(prefix, path, kwargs))
diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_runner.py b/venv/lib/python3.10/site-packages/aiohttp/web_runner.py
new file mode 100644
index 0000000..9282bb9
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/aiohttp/web_runner.py
@@ -0,0 +1,381 @@
+import asyncio
+import signal
+import socket
+from abc import ABC, abstractmethod
+from typing import Any, List, Optional, Set
+
+from yarl import URL
+
+from .web_app import Application
+from .web_server import Server
+
+try:
+    from ssl import SSLContext
+except ImportError:
+    SSLContext = object  # type: ignore[misc,assignment]
+
+
+__all__ = (
+    "BaseSite",
+    "TCPSite",
+    "UnixSite",
+    "NamedPipeSite",
+    "SockSite",
+    "BaseRunner",
+    "AppRunner",
+    "ServerRunner",
+    "GracefulExit",
+)
+
+
+class GracefulExit(SystemExit):
+    code = 1
+
+
+def _raise_graceful_exit() -> None:
+    raise GracefulExit()
+
+
+class BaseSite(ABC):
+    __slots__ = ("_runner", "_shutdown_timeout", "_ssl_context", "_backlog", "_server")
+
+    def __init__(
+        self,
+        runner: "BaseRunner",
+        *,
+        shutdown_timeout: float = 60.0,
+        ssl_context: Optional[SSLContext] = None,
+        backlog: int = 128,
+    ) -> None:
+        if runner.server is None:
+            raise RuntimeError("Call runner.setup() before making a site")
+        self._runner = runner
+        self._shutdown_timeout = shutdown_timeout
+        self._ssl_context = ssl_context
+        self._backlog = backlog
+        self._server: Optional[asyncio.AbstractServer] = None
+
+    @property
+    @abstractmethod
+    def name(self) -> str:
+        pass  # pragma: no cover
+
+    @abstractmethod
+    async def start(self) -> None:
+        self._runner._reg_site(self)
+
+    async def stop(self) -> None:
self._runner._check_site(self) + if self._server is None: + self._runner._unreg_site(self) + return # not started yet + self._server.close() + # named pipes do not have wait_closed property + if hasattr(self._server, "wait_closed"): + await self._server.wait_closed() + await self._runner.shutdown() + assert self._runner.server + await self._runner.server.shutdown(self._shutdown_timeout) + self._runner._unreg_site(self) + + +class TCPSite(BaseSite): + __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port") + + def __init__( + self, + runner: "BaseRunner", + host: Optional[str] = None, + port: Optional[int] = None, + *, + shutdown_timeout: float = 60.0, + ssl_context: Optional[SSLContext] = None, + backlog: int = 128, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, + ) -> None: + super().__init__( + runner, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + self._host = host + if port is None: + port = 8443 if self._ssl_context else 8080 + self._port = port + self._reuse_address = reuse_address + self._reuse_port = reuse_port + + @property + def name(self) -> str: + scheme = "https" if self._ssl_context else "http" + host = "0.0.0.0" if self._host is None else self._host + return str(URL.build(scheme=scheme, host=host, port=self._port)) + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_server( + server, + self._host, + self._port, + ssl=self._ssl_context, + backlog=self._backlog, + reuse_address=self._reuse_address, + reuse_port=self._reuse_port, + ) + + +class UnixSite(BaseSite): + __slots__ = ("_path",) + + def __init__( + self, + runner: "BaseRunner", + path: str, + *, + shutdown_timeout: float = 60.0, + ssl_context: Optional[SSLContext] = None, + backlog: int = 128, + ) -> None: + super().__init__( + runner, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + self._path = path + + @property + def name(self) -> str: + scheme = "https" if self._ssl_context else "http" + return f"{scheme}://unix:{self._path}:" + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_unix_server( + server, self._path, ssl=self._ssl_context, backlog=self._backlog + ) + + +class NamedPipeSite(BaseSite): + __slots__ = ("_path",) + + def __init__( + self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0 + ) -> None: + loop = asyncio.get_event_loop() + if not isinstance( + loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] + ): + raise RuntimeError( + "Named Pipes only available in proactor" "loop under windows" + ) + super().__init__(runner, shutdown_timeout=shutdown_timeout) + self._path = path + + @property + def name(self) -> str: + return self._path + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + _server = await loop.start_serving_pipe( # type: ignore[attr-defined] + server, self._path + ) + self._server = _server[0] + + +class SockSite(BaseSite): + __slots__ = ("_sock", "_name") + + def __init__( + self, + runner: "BaseRunner", + sock: socket.socket, + *, + shutdown_timeout: float = 60.0, + ssl_context: Optional[SSLContext] = None, + backlog: int = 128, + ) -> None: + super().__init__( + runner, + 
shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + self._sock = sock + scheme = "https" if self._ssl_context else "http" + if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX: + name = f"{scheme}://unix:{sock.getsockname()}:" + else: + host, port = sock.getsockname()[:2] + name = str(URL.build(scheme=scheme, host=host, port=port)) + self._name = name + + @property + def name(self) -> str: + return self._name + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_server( + server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog + ) + + +class BaseRunner(ABC): + __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites") + + def __init__(self, *, handle_signals: bool = False, **kwargs: Any) -> None: + self._handle_signals = handle_signals + self._kwargs = kwargs + self._server: Optional[Server] = None + self._sites: List[BaseSite] = [] + + @property + def server(self) -> Optional[Server]: + return self._server + + @property + def addresses(self) -> List[Any]: + ret: List[Any] = [] + for site in self._sites: + server = site._server + if server is not None: + sockets = server.sockets + if sockets is not None: + for sock in sockets: + ret.append(sock.getsockname()) + return ret + + @property + def sites(self) -> Set[BaseSite]: + return set(self._sites) + + async def setup(self) -> None: + loop = asyncio.get_event_loop() + + if self._handle_signals: + try: + loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit) + loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit) + except NotImplementedError: # pragma: no cover + # add_signal_handler is not implemented on Windows + pass + + self._server = await self._make_server() + + @abstractmethod + async def shutdown(self) -> None: + pass # pragma: no cover + + async def cleanup(self) -> None: + loop = asyncio.get_event_loop() + + # The loop over sites is intentional, an exception on gather() + # leaves self._sites in unpredictable state. 
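# Editorial sketch (not in the original source) of the runner lifecycle this
# cleanup participates in; the host, port, and names are illustrative:
#
#     runner = AppRunner(app)
#     await runner.setup()                  # creates the low-level Server
#     site = TCPSite(runner, "127.0.0.1", 8080)
#     await site.start()
#     ...
#     await runner.cleanup()                # stops every site, then the server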
+ # The loop guaranties that a site is either deleted on success or + # still present on failure + for site in list(self._sites): + await site.stop() + await self._cleanup_server() + self._server = None + if self._handle_signals: + try: + loop.remove_signal_handler(signal.SIGINT) + loop.remove_signal_handler(signal.SIGTERM) + except NotImplementedError: # pragma: no cover + # remove_signal_handler is not implemented on Windows + pass + + @abstractmethod + async def _make_server(self) -> Server: + pass # pragma: no cover + + @abstractmethod + async def _cleanup_server(self) -> None: + pass # pragma: no cover + + def _reg_site(self, site: BaseSite) -> None: + if site in self._sites: + raise RuntimeError(f"Site {site} is already registered in runner {self}") + self._sites.append(site) + + def _check_site(self, site: BaseSite) -> None: + if site not in self._sites: + raise RuntimeError(f"Site {site} is not registered in runner {self}") + + def _unreg_site(self, site: BaseSite) -> None: + if site not in self._sites: + raise RuntimeError(f"Site {site} is not registered in runner {self}") + self._sites.remove(site) + + +class ServerRunner(BaseRunner): + """Low-level web server runner""" + + __slots__ = ("_web_server",) + + def __init__( + self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any + ) -> None: + super().__init__(handle_signals=handle_signals, **kwargs) + self._web_server = web_server + + async def shutdown(self) -> None: + pass + + async def _make_server(self) -> Server: + return self._web_server + + async def _cleanup_server(self) -> None: + pass + + +class AppRunner(BaseRunner): + """Web Application runner""" + + __slots__ = ("_app",) + + def __init__( + self, app: Application, *, handle_signals: bool = False, **kwargs: Any + ) -> None: + super().__init__(handle_signals=handle_signals, **kwargs) + if not isinstance(app, Application): + raise TypeError( + "The first argument should be web.Application " + "instance, got {!r}".format(app) + ) + self._app = app + + @property + def app(self) -> Application: + return self._app + + async def shutdown(self) -> None: + await self._app.shutdown() + + async def _make_server(self) -> Server: + loop = asyncio.get_event_loop() + self._app._set_loop(loop) + self._app.on_startup.freeze() + await self._app.startup() + self._app.freeze() + + return self._app._make_handler(loop=loop, **self._kwargs) + + async def _cleanup_server(self) -> None: + await self._app.cleanup() diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_server.py b/venv/lib/python3.10/site-packages/aiohttp/web_server.py new file mode 100644 index 0000000..fa46e90 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_server.py @@ -0,0 +1,62 @@ +"""Low level HTTP server.""" +import asyncio +from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa + +from .abc import AbstractStreamWriter +from .helpers import get_running_loop +from .http_parser import RawRequestMessage +from .streams import StreamReader +from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler +from .web_request import BaseRequest + +__all__ = ("Server",) + + +class Server: + def __init__( + self, + handler: _RequestHandler, + *, + request_factory: Optional[_RequestFactory] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any + ) -> None: + self._loop = get_running_loop(loop) + self._connections: Dict[RequestHandler, asyncio.Transport] = {} + self._kwargs = kwargs + self.requests_count = 0 + self.request_handler = handler + 
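# Editorial note (not in the original source): request_factory is the
# override point the higher-level Application machinery uses so that
# handlers receive full web.Request objects; when it is absent,
# _make_request below produces plain BaseRequest instances.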
self.request_factory = request_factory or self._make_request + + @property + def connections(self) -> List[RequestHandler]: + return list(self._connections.keys()) + + def connection_made( + self, handler: RequestHandler, transport: asyncio.Transport + ) -> None: + self._connections[handler] = transport + + def connection_lost( + self, handler: RequestHandler, exc: Optional[BaseException] = None + ) -> None: + if handler in self._connections: + del self._connections[handler] + + def _make_request( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: RequestHandler, + writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + ) -> BaseRequest: + return BaseRequest(message, payload, protocol, writer, task, self._loop) + + async def shutdown(self, timeout: Optional[float] = None) -> None: + coros = [conn.shutdown(timeout) for conn in self._connections] + await asyncio.gather(*coros) + self._connections.clear() + + def __call__(self) -> RequestHandler: + return RequestHandler(self, loop=self._loop, **self._kwargs) diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_urldispatcher.py b/venv/lib/python3.10/site-packages/aiohttp/web_urldispatcher.py new file mode 100644 index 0000000..5942e35 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_urldispatcher.py @@ -0,0 +1,1220 @@ +import abc +import asyncio +import base64 +import hashlib +import inspect +import keyword +import os +import re +import warnings +from contextlib import contextmanager +from functools import wraps +from pathlib import Path +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Container, + Dict, + Generator, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Set, + Sized, + Tuple, + Type, + Union, + cast, +) + +from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined] + +from . 
import hdrs +from .abc import AbstractMatchInfo, AbstractRouter, AbstractView +from .helpers import DEBUG +from .http import HttpVersion11 +from .typedefs import Final, Handler, PathLike, TypedDict +from .web_exceptions import ( + HTTPException, + HTTPExpectationFailed, + HTTPForbidden, + HTTPMethodNotAllowed, + HTTPNotFound, +) +from .web_fileresponse import FileResponse +from .web_request import Request +from .web_response import Response, StreamResponse +from .web_routedef import AbstractRouteDef + +__all__ = ( + "UrlDispatcher", + "UrlMappingMatchInfo", + "AbstractResource", + "Resource", + "PlainResource", + "DynamicResource", + "AbstractRoute", + "ResourceRoute", + "StaticResource", + "View", +) + + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application + + BaseDict = Dict[str, str] +else: + BaseDict = dict + +YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) + +HTTP_METHOD_RE: Final[Pattern[str]] = re.compile( + r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$" +) +ROUTE_RE: Final[Pattern[str]] = re.compile( + r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})" +) +PATH_SEP: Final[str] = re.escape("/") + + +_ExpectHandler = Callable[[Request], Awaitable[None]] +_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] + + +class _InfoDict(TypedDict, total=False): + path: str + + formatter: str + pattern: Pattern[str] + + directory: Path + prefix: str + routes: Mapping[str, "AbstractRoute"] + + app: "Application" + + domain: str + + rule: "AbstractRuleMatching" + + http_exception: HTTPException + + +class AbstractResource(Sized, Iterable["AbstractRoute"]): + def __init__(self, *, name: Optional[str] = None) -> None: + self._name = name + + @property + def name(self) -> Optional[str]: + return self._name + + @property + @abc.abstractmethod + def canonical(self) -> str: + """Exposes the resource's canonical path. + + For example '/foo/bar/{name}' + + """ + + @abc.abstractmethod # pragma: no branch + def url_for(self, **kwargs: str) -> URL: + """Construct url for resource with additional params.""" + + @abc.abstractmethod # pragma: no branch + async def resolve(self, request: Request) -> _Resolve: + """Resolve resource. + + Return (UrlMappingMatchInfo, allowed_methods) pair. + """ + + @abc.abstractmethod + def add_prefix(self, prefix: str) -> None: + """Add a prefix to processed URLs. + + Required for subapplications support. 
+ """ + + @abc.abstractmethod + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + def freeze(self) -> None: + pass + + @abc.abstractmethod + def raw_match(self, path: str) -> bool: + """Perform a raw match against path""" + + +class AbstractRoute(abc.ABC): + def __init__( + self, + method: str, + handler: Union[Handler, Type[AbstractView]], + *, + expect_handler: Optional[_ExpectHandler] = None, + resource: Optional[AbstractResource] = None, + ) -> None: + + if expect_handler is None: + expect_handler = _default_expect_handler + + assert asyncio.iscoroutinefunction( + expect_handler + ), f"Coroutine is expected, got {expect_handler!r}" + + method = method.upper() + if not HTTP_METHOD_RE.match(method): + raise ValueError(f"{method} is not allowed HTTP method") + + assert callable(handler), handler + if asyncio.iscoroutinefunction(handler): + pass + elif inspect.isgeneratorfunction(handler): + warnings.warn( + "Bare generators are deprecated, " "use @coroutine wrapper", + DeprecationWarning, + ) + elif isinstance(handler, type) and issubclass(handler, AbstractView): + pass + else: + warnings.warn( + "Bare functions are deprecated, " "use async ones", DeprecationWarning + ) + + @wraps(handler) + async def handler_wrapper(request: Request) -> StreamResponse: + result = old_handler(request) + if asyncio.iscoroutine(result): + return await result + return result # type: ignore[return-value] + + old_handler = handler + handler = handler_wrapper + + self._method = method + self._handler = handler + self._expect_handler = expect_handler + self._resource = resource + + @property + def method(self) -> str: + return self._method + + @property + def handler(self) -> Handler: + return self._handler + + @property + @abc.abstractmethod + def name(self) -> Optional[str]: + """Optional route's name, always equals to resource's name.""" + + @property + def resource(self) -> Optional[AbstractResource]: + return self._resource + + @abc.abstractmethod + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + @abc.abstractmethod # pragma: no branch + def url_for(self, *args: str, **kwargs: str) -> URL: + """Construct url for route with additional params.""" + + async def handle_expect_header(self, request: Request) -> None: + await self._expect_handler(request) + + +class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo): + def __init__(self, match_dict: Dict[str, str], route: AbstractRoute): + super().__init__(match_dict) + self._route = route + self._apps: List[Application] = [] + self._current_app: Optional[Application] = None + self._frozen = False + + @property + def handler(self) -> Handler: + return self._route.handler + + @property + def route(self) -> AbstractRoute: + return self._route + + @property + def expect_handler(self) -> _ExpectHandler: + return self._route.handle_expect_header + + @property + def http_exception(self) -> Optional[HTTPException]: + return None + + def get_info(self) -> _InfoDict: # type: ignore[override] + return self._route.get_info() + + @property + def apps(self) -> Tuple["Application", ...]: + return tuple(self._apps) + + def add_app(self, app: "Application") -> None: + if self._frozen: + raise RuntimeError("Cannot change apps stack after .freeze() call") + if self._current_app is None: + self._current_app = app + self._apps.insert(0, app) + + @property + def current_app(self) -> "Application": + app = self._current_app + assert app is not None + return app + + 
@contextmanager
+    def set_current_app(self, app: "Application") -> Generator[None, None, None]:
+        if DEBUG:  # pragma: no cover
+            if app not in self._apps:
+                raise RuntimeError(
+                    "Expected one of the following apps {!r}, got {!r}".format(
+                        self._apps, app
+                    )
+                )
+        prev = self._current_app
+        self._current_app = app
+        try:
+            yield
+        finally:
+            self._current_app = prev
+
+    def freeze(self) -> None:
+        self._frozen = True
+
+    def __repr__(self) -> str:
+        return f"<MatchInfo {super().__repr__()}: {self._route}>"
+
+
+class MatchInfoError(UrlMappingMatchInfo):
+    def __init__(self, http_exception: HTTPException) -> None:
+        self._exception = http_exception
+        super().__init__({}, SystemRoute(self._exception))
+
+    @property
+    def http_exception(self) -> HTTPException:
+        return self._exception
+
+    def __repr__(self) -> str:
+        return "<MatchInfoError {}: {}>".format(
+            self._exception.status, self._exception.reason
+        )
+
+
+async def _default_expect_handler(request: Request) -> None:
+    """Default handler for Expect header.
+
+    Just send "100 Continue" to client.
+    raise HTTPExpectationFailed if value of header is not "100-continue"
+    """
+    expect = request.headers.get(hdrs.EXPECT, "")
+    if request.version == HttpVersion11:
+        if expect.lower() == "100-continue":
+            await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
+        else:
+            raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
+
+
+class Resource(AbstractResource):
+    def __init__(self, *, name: Optional[str] = None) -> None:
+        super().__init__(name=name)
+        self._routes: List[ResourceRoute] = []
+
+    def add_route(
+        self,
+        method: str,
+        handler: Union[Type[AbstractView], Handler],
+        *,
+        expect_handler: Optional[_ExpectHandler] = None,
+    ) -> "ResourceRoute":
+
+        for route_obj in self._routes:
+            if route_obj.method == method or route_obj.method == hdrs.METH_ANY:
+                raise RuntimeError(
+                    "Added route will never be executed, "
+                    "method {route.method} is already "
+                    "registered".format(route=route_obj)
+                )
+
+        route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler)
+        self.register_route(route_obj)
+        return route_obj
+
+    def register_route(self, route: "ResourceRoute") -> None:
+        assert isinstance(
+            route, ResourceRoute
+        ), f"Instance of Route class is required, got {route!r}"
+        self._routes.append(route)
+
+    async def resolve(self, request: Request) -> _Resolve:
+        allowed_methods: Set[str] = set()
+
+        match_dict = self._match(request.rel_url.raw_path)
+        if match_dict is None:
+            return None, allowed_methods
+
+        for route_obj in self._routes:
+            route_method = route_obj.method
+            allowed_methods.add(route_method)
+
+            if route_method == request.method or route_method == hdrs.METH_ANY:
+                return (UrlMappingMatchInfo(match_dict, route_obj), allowed_methods)
+        else:
+            return None, allowed_methods
+
+    @abc.abstractmethod
+    def _match(self, path: str) -> Optional[Dict[str, str]]:
+        pass  # pragma: no cover
+
+    def __len__(self) -> int:
+        return len(self._routes)
+
+    def __iter__(self) -> Iterator[AbstractRoute]:
+        return iter(self._routes)
+
+    # TODO: implement all abstract methods
+
+
+class PlainResource(Resource):
+    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
+        super().__init__(name=name)
+        assert not path or path.startswith("/")
+        self._path = path
+
+    @property
+    def canonical(self) -> str:
+        return self._path
+
+    def freeze(self) -> None:
+        if not self._path:
+            self._path = "/"
+
+    def add_prefix(self, prefix: str) -> None:
+        assert prefix.startswith("/")
+        assert not prefix.endswith("/")
+        assert len(prefix) > 1
+        self._path = prefix + self._path
+
+    def _match(self, path: str) -> Optional[Dict[str, str]]:
+        # string comparison is about 10 times faster than regexp matching
+        if self._path == path:
+            return {}
+        else:
+            return None
+
+    def raw_match(self, path: str) -> bool:
+        return self._path == path
+
+    def get_info(self) -> _InfoDict:
+        return {"path": self._path}
+
+    def url_for(self) -> URL:  # type: ignore[override]
+        return URL.build(path=self._path, encoded=True)
+
+    def __repr__(self) -> str:
+        name = "'" + self.name + "' " if self.name is not None else ""
+        return f"<PlainResource {name} {self._path}>"
+
+
+class DynamicResource(Resource):
+
+    DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")
+    DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
+    GOOD = r"[^{}/]+"
+
+    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
+        super().__init__(name=name)
+        pattern = ""
+        formatter = ""
+        for part in ROUTE_RE.split(path):
+            match = self.DYN.fullmatch(part)
+            if match:
+                pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD)
+                formatter += "{" + match.group("var") + "}"
+                continue
+
+            match = self.DYN_WITH_RE.fullmatch(part)
+            if match:
+                pattern += "(?P<{var}>{re})".format(**match.groupdict())
+                formatter += "{" + match.group("var") + "}"
+                continue
+
+            if "{" in part or "}" in part:
+                raise ValueError(f"Invalid path '{path}'['{part}']")
+
+            part = _requote_path(part)
+            formatter += part
+            pattern += re.escape(part)
+
+        try:
+            compiled = re.compile(pattern)
+        except re.error as exc:
+            raise ValueError(f"Bad pattern '{pattern}': {exc}") from None
+        assert compiled.pattern.startswith(PATH_SEP)
+        assert formatter.startswith("/")
+        self._pattern = compiled
+        self._formatter = formatter
+
+    @property
+    def canonical(self) -> str:
+        return self._formatter
+
+    def add_prefix(self, prefix: str) -> None:
+        assert prefix.startswith("/")
+        assert not prefix.endswith("/")
+        assert len(prefix) > 1
+        self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
+        self._formatter = prefix + self._formatter
+
+    def _match(self, path: str) -> Optional[Dict[str, str]]:
+        match = self._pattern.fullmatch(path)
+        if match is None:
+            return None
+        else:
+            return {
+                key: _unquote_path(value) for key, value in match.groupdict().items()
+            }
+
+    def raw_match(self, path: str) -> bool:
+        return self._formatter == path
+
+    def get_info(self) -> _InfoDict:
+        return {"formatter": self._formatter, "pattern": self._pattern}
+
+    def url_for(self, **parts: str) -> URL:
+        url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()})
+        return URL.build(path=url, encoded=True)
+
+    def __repr__(self) -> str:
+        name = "'" + self.name + "' " if self.name is not None else ""
+        return "<DynamicResource {name} {formatter}>".format(
+            name=name, formatter=self._formatter
+        )
+
+
+class PrefixResource(AbstractResource):
+    def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:
+        assert not prefix or prefix.startswith("/"), prefix
+        assert prefix in ("", "/") or not prefix.endswith("/"), prefix
+        super().__init__(name=name)
+        self._prefix = _requote_path(prefix)
+        self._prefix2 = self._prefix + "/"
+
+    @property
+    def canonical(self) -> str:
+        return self._prefix
+
+    def add_prefix(self, prefix: str) -> None:
+        assert prefix.startswith("/")
+        assert not prefix.endswith("/")
+        assert len(prefix) > 1
+        self._prefix = prefix + self._prefix
+        self._prefix2 = self._prefix + "/"
+
+    def raw_match(self, prefix: str) -> bool:
+        return False
+
+    # TODO: impl missing abstract methods
+
+
+class StaticResource(PrefixResource):
+    VERSION_KEY = "v"
+
+    def __init__(
+        self,
prefix: str, + directory: PathLike, + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + chunk_size: int = 256 * 1024, + show_index: bool = False, + follow_symlinks: bool = False, + append_version: bool = False, + ) -> None: + super().__init__(prefix, name=name) + try: + directory = Path(directory) + if str(directory).startswith("~"): + directory = Path(os.path.expanduser(str(directory))) + directory = directory.resolve() + if not directory.is_dir(): + raise ValueError("Not a directory") + except (FileNotFoundError, ValueError) as error: + raise ValueError(f"No directory exists at '{directory}'") from error + self._directory = directory + self._show_index = show_index + self._chunk_size = chunk_size + self._follow_symlinks = follow_symlinks + self._expect_handler = expect_handler + self._append_version = append_version + + self._routes = { + "GET": ResourceRoute( + "GET", self._handle, self, expect_handler=expect_handler + ), + "HEAD": ResourceRoute( + "HEAD", self._handle, self, expect_handler=expect_handler + ), + } + + def url_for( # type: ignore[override] + self, + *, + filename: Union[str, Path], + append_version: Optional[bool] = None, + ) -> URL: + if append_version is None: + append_version = self._append_version + if isinstance(filename, Path): + filename = str(filename) + filename = filename.lstrip("/") + + url = URL.build(path=self._prefix, encoded=True) + # filename is not encoded + if YARL_VERSION < (1, 6): + url = url / filename.replace("%", "%25") + else: + url = url / filename + + if append_version: + try: + filepath = self._directory.joinpath(filename).resolve() + if not self._follow_symlinks: + filepath.relative_to(self._directory) + except (ValueError, FileNotFoundError): + # ValueError for case when path point to symlink + # with follow_symlinks is False + return url # relatively safe + if filepath.is_file(): + # TODO cache file content + # with file watcher for cache invalidation + with filepath.open("rb") as f: + file_bytes = f.read() + h = self._get_file_hash(file_bytes) + url = url.with_query({self.VERSION_KEY: h}) + return url + return url + + @staticmethod + def _get_file_hash(byte_array: bytes) -> str: + m = hashlib.sha256() # todo sha256 can be configurable param + m.update(byte_array) + b64 = base64.urlsafe_b64encode(m.digest()) + return b64.decode("ascii") + + def get_info(self) -> _InfoDict: + return { + "directory": self._directory, + "prefix": self._prefix, + "routes": self._routes, + } + + def set_options_route(self, handler: Handler) -> None: + if "OPTIONS" in self._routes: + raise RuntimeError("OPTIONS route was set already") + self._routes["OPTIONS"] = ResourceRoute( + "OPTIONS", handler, self, expect_handler=self._expect_handler + ) + + async def resolve(self, request: Request) -> _Resolve: + path = request.rel_url.raw_path + method = request.method + allowed_methods = set(self._routes) + if not path.startswith(self._prefix2) and path != self._prefix: + return None, set() + + if method not in allowed_methods: + return None, allowed_methods + + match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])} + return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + return iter(self._routes.values()) + + async def _handle(self, request: Request) -> StreamResponse: + rel_url = request.match_info["filename"] + try: + filename = Path(rel_url) + if filename.anchor: + # rel_url is an 
absolute name like
+                # /static/\\machine_name\c$ or /static/D:\path
+                # where the static dir is totally different
+                raise HTTPForbidden()
+            filepath = self._directory.joinpath(filename).resolve()
+            if not self._follow_symlinks:
+                filepath.relative_to(self._directory)
+        except (ValueError, FileNotFoundError) as error:
+            # relatively safe
+            raise HTTPNotFound() from error
+        except HTTPForbidden:
+            raise
+        except Exception as error:
+            # perm error or other kind!
+            request.app.logger.exception(error)
+            raise HTTPNotFound() from error
+
+        # on opening a dir, load its contents if allowed
+        if filepath.is_dir():
+            if self._show_index:
+                try:
+                    return Response(
+                        text=self._directory_as_html(filepath), content_type="text/html"
+                    )
+                except PermissionError:
+                    raise HTTPForbidden()
+            else:
+                raise HTTPForbidden()
+        elif filepath.is_file():
+            return FileResponse(filepath, chunk_size=self._chunk_size)
+        else:
+            raise HTTPNotFound
+
+    def _directory_as_html(self, filepath: Path) -> str:
+        # returns directory's index as html
+
+        # sanity check
+        assert filepath.is_dir()
+
+        relative_path_to_dir = filepath.relative_to(self._directory).as_posix()
+        index_of = f"Index of /{relative_path_to_dir}"
+        h1 = f"<h1>{index_of}</h1>"
+
+        index_list = []
+        dir_index = filepath.iterdir()
+        for _file in sorted(dir_index):
+            # show file url as relative to static path
+            rel_path = _file.relative_to(self._directory).as_posix()
+            file_url = self._prefix + "/" + rel_path
+
+            # if file is a directory, add '/' to the end of the name
+            if _file.is_dir():
+                file_name = f"{_file.name}/"
+            else:
+                file_name = _file.name
+
+            index_list.append(
+                '<li><a href="{url}">{name}</a></li>'.format(
+                    url=file_url, name=file_name
+                )
+            )
+        ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
+        body = f"<body>\n{h1}\n{ul}\n</body>"
+
+        head_str = f"<head>\n<title>{index_of}</title>\n</head>"
+        html = f"<html>\n{head_str}\n{body}\n</html>"
+
+        return html
+
+    def __repr__(self) -> str:
+        name = "'" + self.name + "'" if self.name is not None else ""
+        return "<StaticResource {name} {path} -> {directory!r}>".format(
+            name=name, path=self._prefix, directory=self._directory
+        )
+
+
+class PrefixedSubAppResource(PrefixResource):
+    def __init__(self, prefix: str, app: "Application") -> None:
+        super().__init__(prefix)
+        self._app = app
+        for resource in app.router.resources():
+            resource.add_prefix(prefix)
+
+    def add_prefix(self, prefix: str) -> None:
+        super().add_prefix(prefix)
+        for resource in self._app.router.resources():
+            resource.add_prefix(prefix)
+
+    def url_for(self, *args: str, **kwargs: str) -> URL:
+        raise RuntimeError(".url_for() is not supported " "by sub-application root")
+
+    def get_info(self) -> _InfoDict:
+        return {"app": self._app, "prefix": self._prefix}
+
+    async def resolve(self, request: Request) -> _Resolve:
+        if (
+            not request.url.raw_path.startswith(self._prefix2)
+            and request.url.raw_path != self._prefix
+        ):
+            return None, set()
+        match_info = await self._app.router.resolve(request)
+        match_info.add_app(self._app)
+        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
+            methods = match_info.http_exception.allowed_methods
+        else:
+            methods = set()
+        return match_info, methods
+
+    def __len__(self) -> int:
+        return len(self._app.router.routes())
+
+    def __iter__(self) -> Iterator[AbstractRoute]:
+        return iter(self._app.router.routes())
+
+    def __repr__(self) -> str:
+        return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(
+            prefix=self._prefix, app=self._app
+        )
+
+
+class AbstractRuleMatching(abc.ABC):
+    @abc.abstractmethod  # pragma: no branch
+    async def match(self, request: Request) -> bool:
+        """Return bool if the request satisfies the criteria"""
+
+    @abc.abstractmethod  # pragma: no branch
+    def get_info(self) -> _InfoDict:
+        """Return a dict with additional info useful for introspection"""
+
+    @property
+    @abc.abstractmethod  # pragma: no branch
+    def canonical(self) -> str:
+        """Return a str"""
+
+
+class Domain(AbstractRuleMatching):
+    re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)$")
+
+    def __init__(self, domain: str) -> None:
+        super().__init__()
+        self._domain = self.validation(domain)
+
+    @property
+    def canonical(self) -> str:
+        return self._domain
+
+    def validation(self, domain: str) -> str:
+        if not isinstance(domain, str):
+            raise TypeError("Domain must be str")
+        domain = domain.rstrip(".").lower()
+        if not domain:
+            raise ValueError("Domain cannot be empty")
+        elif "://" in domain:
+            raise ValueError("Scheme not supported")
+        url = URL("http://" + domain)
+        assert url.raw_host is not None
+        if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")):
+            raise ValueError("Domain not valid")
+        if url.port == 80:
+            return url.raw_host
+        return f"{url.raw_host}:{url.port}"
+
+    async def match(self, request: Request) -> bool:
+        host = request.headers.get(hdrs.HOST)
+        if not host:
+            return False
+        return self.match_domain(host)
+
+    def match_domain(self, host: str) -> bool:
+        return host.lower() == self._domain
+
+    def get_info(self) -> _InfoDict:
+        return {"domain": self._domain}
+
+
+class MaskDomain(Domain):
+    re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)$")
+
+    def __init__(self, domain: str) -> None:
+        super().__init__(domain)
+        mask = self._domain.replace(".", r"\.").replace("*", ".*")
+        self._mask = re.compile(mask)
+
+    @property
+    def canonical(self) -> str:
+        return self._mask.pattern
+
+    def match_domain(self, host: str) -> bool:
+        return self._mask.fullmatch(host) is not None
+
+
+class MatchedSubAppResource(PrefixedSubAppResource):
+    def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:
+        AbstractResource.__init__(self)
+        self._prefix = ""
+        self._app = app
+        self._rule = rule
+
+    @property
+    def canonical(self) -> str:
+        return self._rule.canonical
+
+    def get_info(self) -> _InfoDict:
+        return {"app": self._app, "rule": self._rule}
+
+    async def resolve(self, request: Request) -> _Resolve:
+        if not await self._rule.match(request):
+            return None, set()
+        match_info = await self._app.router.resolve(request)
+        match_info.add_app(self._app)
+        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
+            methods = match_info.http_exception.allowed_methods
+        else:
+            methods = set()
+        return match_info, methods
+
+    def __repr__(self) -> str:
+        return "<MatchedSubAppResource -> {app!r}>" "".format(app=self._app)
+
+
+class ResourceRoute(AbstractRoute):
+    """A route with resource"""
+
+    def __init__(
+        self,
+        method: str,
+        handler: Union[Handler, Type[AbstractView]],
+        resource: AbstractResource,
+        *,
+        expect_handler: Optional[_ExpectHandler] = None,
+    ) -> None:
+        super().__init__(
+            method, handler, expect_handler=expect_handler, resource=resource
+        )
+
+    def __repr__(self) -> str:
+        return "<ResourceRoute [{method}] {resource} -> {handler!r}".format(
+            method=self.method, resource=self._resource, handler=self.handler
+        )
+
+    @property
+    def name(self) -> Optional[str]:
+        if self._resource is None:
+            return None
+        return self._resource.name
+
+    def url_for(self, *args: str, **kwargs: str) -> URL:
+        """Construct url for route with additional params."""
+        assert self._resource is not None
+        return self._resource.url_for(*args, **kwargs)
+
+    def get_info(self) -> _InfoDict:
+        assert self._resource is not None
+        return self._resource.get_info()
+
+
+class SystemRoute(AbstractRoute):
+    def __init__(self, http_exception: HTTPException) -> None:
+        super().__init__(hdrs.METH_ANY, self._handle)
+        self._http_exception = http_exception
+
+    def url_for(self, *args: str, **kwargs: str) -> URL:
+        raise RuntimeError(".url_for() is not allowed for SystemRoute")
+
+    @property
+    def name(self) -> Optional[str]:
+        return None
+
+    def get_info(self) -> _InfoDict:
+        return {"http_exception": self._http_exception}
+
+    async def _handle(self, request: Request) -> StreamResponse:
+        raise self._http_exception
+
+    @property
+    def status(self) -> int:
+        return self._http_exception.status
+
+    @property
+    def reason(self) -> str:
+        return self._http_exception.reason
+
+    def __repr__(self) -> str:
+        return "<SystemRoute {self.status}: {self.reason}>".format(self=self)
+
+
+class View(AbstractView):
+    async def _iter(self) -> StreamResponse:
+        if self.request.method not in hdrs.METH_ALL:
+            self._raise_allowed_methods()
+        method: Callable[[], Awaitable[StreamResponse]] = getattr(
+            self, self.request.method.lower(), None
+        )
+        if method is None:
+            self._raise_allowed_methods()
+        resp = await method()
+        return resp
+
+    def __await__(self) -> Generator[Any, None, StreamResponse]:
+        return self._iter().__await__()
+
+    def _raise_allowed_methods(self) -> None:
+        allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
+        raise HTTPMethodNotAllowed(self.request.method, allowed_methods)
+
+
+class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]):
def __init__(self, resources: List[AbstractResource]) -> None: + self._resources = resources + + def __len__(self) -> int: + return len(self._resources) + + def __iter__(self) -> Iterator[AbstractResource]: + yield from self._resources + + def __contains__(self, resource: object) -> bool: + return resource in self._resources + + +class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]): + def __init__(self, resources: List[AbstractResource]): + self._routes: List[AbstractRoute] = [] + for resource in resources: + for route in resource: + self._routes.append(route) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + yield from self._routes + + def __contains__(self, route: object) -> bool: + return route in self._routes + + +class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]): + + NAME_SPLIT_RE = re.compile(r"[.:-]") + + def __init__(self) -> None: + super().__init__() + self._resources: List[AbstractResource] = [] + self._named_resources: Dict[str, AbstractResource] = {} + + async def resolve(self, request: Request) -> UrlMappingMatchInfo: + method = request.method + allowed_methods: Set[str] = set() + + for resource in self._resources: + match_dict, allowed = await resource.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + + if allowed_methods: + return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods)) + else: + return MatchInfoError(HTTPNotFound()) + + def __iter__(self) -> Iterator[str]: + return iter(self._named_resources) + + def __len__(self) -> int: + return len(self._named_resources) + + def __contains__(self, resource: object) -> bool: + return resource in self._named_resources + + def __getitem__(self, name: str) -> AbstractResource: + return self._named_resources[name] + + def resources(self) -> ResourcesView: + return ResourcesView(self._resources) + + def routes(self) -> RoutesView: + return RoutesView(self._resources) + + def named_resources(self) -> Mapping[str, AbstractResource]: + return MappingProxyType(self._named_resources) + + def register_resource(self, resource: AbstractResource) -> None: + assert isinstance( + resource, AbstractResource + ), f"Instance of AbstractResource class is required, got {resource!r}" + if self.frozen: + raise RuntimeError("Cannot register a resource into frozen router.") + + name = resource.name + + if name is not None: + parts = self.NAME_SPLIT_RE.split(name) + for part in parts: + if keyword.iskeyword(part): + raise ValueError( + f"Incorrect route name {name!r}, " + "python keywords cannot be used " + "for route name" + ) + if not part.isidentifier(): + raise ValueError( + "Incorrect route name {!r}, " + "the name should be a sequence of " + "python identifiers separated " + "by dash, dot or column".format(name) + ) + if name in self._named_resources: + raise ValueError( + "Duplicate {!r}, " + "already handled by {!r}".format(name, self._named_resources[name]) + ) + self._named_resources[name] = resource + self._resources.append(resource) + + def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: + if path and not path.startswith("/"): + raise ValueError("path should be started with / or be empty") + # Reuse last added resource if path and name are the same + if self._resources: + resource = self._resources[-1] + if resource.name == name and resource.raw_match(path): + return cast(Resource, resource) + if not ("{" in path or "}" in path or 
ROUTE_RE.search(path)): + resource = PlainResource(_requote_path(path), name=name) + self.register_resource(resource) + return resource + resource = DynamicResource(path, name=name) + self.register_resource(resource) + return resource + + def add_route( + self, + method: str, + path: str, + handler: Union[Handler, Type[AbstractView]], + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + ) -> AbstractRoute: + resource = self.add_resource(path, name=name) + return resource.add_route(method, handler, expect_handler=expect_handler) + + def add_static( + self, + prefix: str, + path: PathLike, + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + chunk_size: int = 256 * 1024, + show_index: bool = False, + follow_symlinks: bool = False, + append_version: bool = False, + ) -> AbstractResource: + """Add static files view. + + prefix - url prefix + path - folder with files + + """ + assert prefix.startswith("/") + if prefix.endswith("/"): + prefix = prefix[:-1] + resource = StaticResource( + prefix, + path, + name=name, + expect_handler=expect_handler, + chunk_size=chunk_size, + show_index=show_index, + follow_symlinks=follow_symlinks, + append_version=append_version, + ) + self.register_resource(resource) + return resource + + def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method HEAD.""" + return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs) + + def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method OPTIONS.""" + return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs) + + def add_get( + self, + path: str, + handler: Handler, + *, + name: Optional[str] = None, + allow_head: bool = True, + **kwargs: Any, + ) -> AbstractRoute: + """Shortcut for add_route with method GET. + + If allow_head is true, another + route is added allowing head requests to the same endpoint. + """ + resource = self.add_resource(path, name=name) + if allow_head: + resource.add_route(hdrs.METH_HEAD, handler, **kwargs) + return resource.add_route(hdrs.METH_GET, handler, **kwargs) + + def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method POST.""" + return self.add_route(hdrs.METH_POST, path, handler, **kwargs) + + def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method PUT.""" + return self.add_route(hdrs.METH_PUT, path, handler, **kwargs) + + def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method PATCH.""" + return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs) + + def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method DELETE.""" + return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs) + + def add_view( + self, path: str, handler: Type[AbstractView], **kwargs: Any + ) -> AbstractRoute: + """Shortcut for add_route with ANY methods for a class-based view.""" + return self.add_route(hdrs.METH_ANY, path, handler, **kwargs) + + def freeze(self) -> None: + super().freeze() + for resource in self._resources: + resource.freeze() + + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: + """Append routes to route table. + + Parameter should be a sequence of RouteDef objects. 
+ + Returns a list of registered AbstractRoute instances. + """ + registered_routes = [] + for route_def in routes: + registered_routes.extend(route_def.register(self)) + return registered_routes + + +def _quote_path(value: str) -> str: + if YARL_VERSION < (1, 6): + value = value.replace("%", "%25") + return URL.build(path=value, encoded=False).raw_path + + +def _unquote_path(value: str) -> str: + return URL.build(path=value, encoded=True).path + + +def _requote_path(value: str) -> str: + # Quote non-ascii characters and other characters which must be quoted, + # but preserve existing %-sequences. + result = _quote_path(value) + if "%" in value: + result = result.replace("%25", "%") + return result diff --git a/venv/lib/python3.10/site-packages/aiohttp/web_ws.py b/venv/lib/python3.10/site-packages/aiohttp/web_ws.py new file mode 100644 index 0000000..0d32a21 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/web_ws.py @@ -0,0 +1,487 @@ +import asyncio +import base64 +import binascii +import hashlib +import json +from typing import Any, Iterable, Optional, Tuple, cast + +import async_timeout +import attr +from multidict import CIMultiDict + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import call_later, set_result +from .http import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WS_KEY, + WebSocketError, + WebSocketReader, + WebSocketWriter, + WSCloseCode, + WSMessage, + WSMsgType as WSMsgType, + ws_ext_gen, + ws_ext_parse, +) +from .log import ws_logger +from .streams import EofStream, FlowControlDataQueue +from .typedefs import Final, JSONDecoder, JSONEncoder +from .web_exceptions import HTTPBadRequest, HTTPException +from .web_request import BaseRequest +from .web_response import StreamResponse + +__all__ = ( + "WebSocketResponse", + "WebSocketReady", + "WSMsgType", +) + +THRESHOLD_CONNLOST_ACCESS: Final[int] = 5 + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class WebSocketReady: + ok: bool + protocol: Optional[str] + + def __bool__(self) -> bool: + return self.ok + + +class WebSocketResponse(StreamResponse): + + _length_check = False + + def __init__( + self, + *, + timeout: float = 10.0, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + protocols: Iterable[str] = (), + compress: bool = True, + max_msg_size: int = 4 * 1024 * 1024, + ) -> None: + super().__init__(status=101) + self._protocols = protocols + self._ws_protocol: Optional[str] = None + self._writer: Optional[WebSocketWriter] = None + self._reader: Optional[FlowControlDataQueue[WSMessage]] = None + self._closed = False + self._closing = False + self._conn_lost = 0 + self._close_code: Optional[int] = None + self._loop: Optional[asyncio.AbstractEventLoop] = None + self._waiting: Optional[asyncio.Future[bool]] = None + self._exception: Optional[BaseException] = None + self._timeout = timeout + self._receive_timeout = receive_timeout + self._autoclose = autoclose + self._autoping = autoping + self._heartbeat = heartbeat + self._heartbeat_cb: Optional[asyncio.TimerHandle] = None + if heartbeat is not None: + self._pong_heartbeat = heartbeat / 2.0 + self._pong_response_cb: Optional[asyncio.TimerHandle] = None + self._compress = compress + self._max_msg_size = max_msg_size + + def _cancel_heartbeat(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None + + if self._heartbeat_cb is not None: + self._heartbeat_cb.cancel() + 
self._heartbeat_cb = None + + def _reset_heartbeat(self) -> None: + self._cancel_heartbeat() + + if self._heartbeat is not None: + assert self._loop is not None + self._heartbeat_cb = call_later( + self._send_heartbeat, self._heartbeat, self._loop + ) + + def _send_heartbeat(self) -> None: + if self._heartbeat is not None and not self._closed: + assert self._loop is not None + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. + self._loop.create_task(self._writer.ping()) # type: ignore[union-attr] + + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = call_later( + self._pong_not_received, self._pong_heartbeat, self._loop + ) + + def _pong_not_received(self) -> None: + if self._req is not None and self._req.transport is not None: + self._closed = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = asyncio.TimeoutError() + self._req.transport.close() + + async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: + # make pre-check to don't hide it by do_handshake() exceptions + if self._payload_writer is not None: + return self._payload_writer + + protocol, writer = self._pre_start(request) + payload_writer = await super().prepare(request) + assert payload_writer is not None + self._post_start(request, protocol, writer) + await payload_writer.drain() + return payload_writer + + def _handshake( + self, request: BaseRequest + ) -> Tuple["CIMultiDict[str]", str, bool, bool]: + headers = request.headers + if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip(): + raise HTTPBadRequest( + text=( + "No WebSocket UPGRADE hdr: {}\n Can " + '"Upgrade" only to "WebSocket".' + ).format(headers.get(hdrs.UPGRADE)) + ) + + if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower(): + raise HTTPBadRequest( + text="No CONNECTION upgrade hdr: {}".format( + headers.get(hdrs.CONNECTION) + ) + ) + + # find common sub-protocol between client and server + protocol = None + if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: + req_protocols = [ + str(proto.strip()) + for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") + ] + + for proto in req_protocols: + if proto in self._protocols: + protocol = proto + break + else: + # No overlap found: Return no protocol as per spec + ws_logger.warning( + "Client protocols %r don’t overlap server-known ones %r", + req_protocols, + self._protocols, + ) + + # check supported version + version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "") + if version not in ("13", "8", "7"): + raise HTTPBadRequest(text=f"Unsupported version: {version}") + + # check client handshake for validity + key = headers.get(hdrs.SEC_WEBSOCKET_KEY) + try: + if not key or len(base64.b64decode(key)) != 16: + raise HTTPBadRequest(text=f"Handshake error: {key!r}") + except binascii.Error: + raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None + + accept_val = base64.b64encode( + hashlib.sha1(key.encode() + WS_KEY).digest() + ).decode() + response_headers = CIMultiDict( + { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: accept_val, + } + ) + + notakeover = False + compress = 0 + if self._compress: + extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) + # Server side always get return with no exception. 
+ # If something happened, just drop compress extension + compress, notakeover = ws_ext_parse(extensions, isserver=True) + if compress: + enabledext = ws_ext_gen( + compress=compress, isserver=True, server_notakeover=notakeover + ) + response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext + + if protocol: + response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol + return ( + response_headers, + protocol, + compress, + notakeover, + ) # type: ignore[return-value] + + def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]: + self._loop = request._loop + + headers, protocol, compress, notakeover = self._handshake(request) + + self.set_status(101) + self.headers.update(headers) + self.force_close() + self._compress = compress + transport = request._protocol.transport + assert transport is not None + writer = WebSocketWriter( + request._protocol, transport, compress=compress, notakeover=notakeover + ) + + return protocol, writer + + def _post_start( + self, request: BaseRequest, protocol: str, writer: WebSocketWriter + ) -> None: + self._ws_protocol = protocol + self._writer = writer + + self._reset_heartbeat() + + loop = self._loop + assert loop is not None + self._reader = FlowControlDataQueue(request._protocol, 2**16, loop=loop) + request.protocol.set_parser( + WebSocketReader(self._reader, self._max_msg_size, compress=self._compress) + ) + # disable HTTP keepalive for WebSocket + request.protocol.keep_alive(False) + + def can_prepare(self, request: BaseRequest) -> WebSocketReady: + if self._writer is not None: + raise RuntimeError("Already started") + try: + _, protocol, _, _ = self._handshake(request) + except HTTPException: + return WebSocketReady(False, None) + else: + return WebSocketReady(True, protocol) + + @property + def closed(self) -> bool: + return self._closed + + @property + def close_code(self) -> Optional[int]: + return self._close_code + + @property + def ws_protocol(self) -> Optional[str]: + return self._ws_protocol + + @property + def compress(self) -> bool: + return self._compress + + def exception(self) -> Optional[BaseException]: + return self._exception + + async def ping(self, message: bytes = b"") -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + await self._writer.ping(message) + + async def pong(self, message: bytes = b"") -> None: + # unsolicited pong + if self._writer is None: + raise RuntimeError("Call .prepare() first") + await self._writer.pong(message) + + async def send_str(self, data: str, compress: Optional[bool] = None) -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + if not isinstance(data, str): + raise TypeError("data argument must be str (%r)" % type(data)) + await self._writer.send(data, binary=False, compress=compress) + + async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError("data argument must be byte-ish (%r)" % type(data)) + await self._writer.send(data, binary=True, compress=compress) + + async def send_json( + self, + data: Any, + compress: Optional[bool] = None, + *, + dumps: JSONEncoder = json.dumps, + ) -> None: + await self.send_str(dumps(data), compress=compress) + + async def write_eof(self) -> None: # type: ignore[override] + if self._eof_sent: + return + if self._payload_writer is None: + raise RuntimeError("Response has not been started") + + await self.close() + 
self._eof_sent = True + + async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + + self._cancel_heartbeat() + reader = self._reader + assert reader is not None + + # we need to break `receive()` cycle first, + # `close()` may be called from different task + if self._waiting is not None and not self._closed: + reader.feed_data(WS_CLOSING_MESSAGE, 0) + await self._waiting + + if not self._closed: + self._closed = True + try: + await self._writer.close(code, message) + writer = self._payload_writer + assert writer is not None + await writer.drain() + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + return True + + if self._closing: + return True + + reader = self._reader + assert reader is not None + try: + async with async_timeout.timeout(self._timeout): + msg = await reader.read() + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + return True + + if msg.type == WSMsgType.CLOSE: + self._close_code = msg.data + return True + + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = asyncio.TimeoutError() + return True + else: + return False + + async def receive(self, timeout: Optional[float] = None) -> WSMessage: + if self._reader is None: + raise RuntimeError("Call .prepare() first") + + loop = self._loop + assert loop is not None + while True: + if self._waiting is not None: + raise RuntimeError("Concurrent call to receive() is not allowed") + + if self._closed: + self._conn_lost += 1 + if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS: + raise RuntimeError("WebSocket connection is closed.") + return WS_CLOSED_MESSAGE + elif self._closing: + return WS_CLOSING_MESSAGE + + try: + self._waiting = loop.create_future() + try: + async with async_timeout.timeout(timeout or self._receive_timeout): + msg = await self._reader.read() + self._reset_heartbeat() + finally: + waiter = self._waiting + set_result(waiter, True) + self._waiting = None + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except EofStream: + self._close_code = WSCloseCode.OK + await self.close() + return WSMessage(WSMsgType.CLOSED, None, None) + except WebSocketError as exc: + self._close_code = exc.code + await self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except Exception as exc: + self._exception = exc + self._closing = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + await self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type == WSMsgType.CLOSE: + self._closing = True + self._close_code = msg.data + if not self._closed and self._autoclose: + await self.close() + elif msg.type == WSMsgType.CLOSING: + self._closing = True + elif msg.type == WSMsgType.PING and self._autoping: + await self.pong(msg.data) + continue + elif msg.type == WSMsgType.PONG and self._autoping: + continue + + return msg + + async def receive_str(self, *, timeout: Optional[float] = None) -> str: + msg = await self.receive(timeout) + if msg.type != WSMsgType.TEXT: + raise TypeError( + "Received message {}:{!r} is not WSMsgType.TEXT".format( + msg.type, msg.data + ) + ) + return cast(str, msg.data) + + async def 
receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: + msg = await self.receive(timeout) + if msg.type != WSMsgType.BINARY: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") + return cast(bytes, msg.data) + + async def receive_json( + self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None + ) -> Any: + data = await self.receive_str(timeout=timeout) + return loads(data) + + async def write(self, data: bytes) -> None: + raise RuntimeError("Cannot call .write() for websocket") + + def __aiter__(self) -> "WebSocketResponse": + return self + + async def __anext__(self) -> WSMessage: + msg = await self.receive() + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): + raise StopAsyncIteration + return msg + + def _cancel(self, exc: BaseException) -> None: + if self._reader is not None: + self._reader.set_exception(exc) diff --git a/venv/lib/python3.10/site-packages/aiohttp/worker.py b/venv/lib/python3.10/site-packages/aiohttp/worker.py new file mode 100644 index 0000000..f130289 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiohttp/worker.py @@ -0,0 +1,269 @@ +"""Async gunicorn worker for aiohttp.web""" + +import asyncio +import os +import re +import signal +import sys +from types import FrameType +from typing import Any, Awaitable, Callable, Optional, Union # noqa + +from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat +from gunicorn.workers import base + +from aiohttp import web + +from .helpers import set_result +from .web_app import Application +from .web_log import AccessLogger + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + + +__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker") + + +class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported] + + DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT + DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default + + def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover + super().__init__(*args, **kw) + + self._task: Optional[asyncio.Task[None]] = None + self.exit_code = 0 + self._notify_waiter: Optional[asyncio.Future[bool]] = None + + def init_process(self) -> None: + # create new event_loop after fork + asyncio.get_event_loop().close() + + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + super().init_process() + + def run(self) -> None: + self._task = self.loop.create_task(self._run()) + + try: # ignore all finalization problems + self.loop.run_until_complete(self._task) + except Exception: + self.log.exception("Exception in gunicorn worker") + self.loop.run_until_complete(self.loop.shutdown_asyncgens()) + self.loop.close() + + sys.exit(self.exit_code) + + async def _run(self) -> None: + runner = None + if isinstance(self.wsgi, Application): + app = self.wsgi + elif asyncio.iscoroutinefunction(self.wsgi): + wsgi = await self.wsgi() + if isinstance(wsgi, web.AppRunner): + runner = wsgi + app = runner.app + else: + app = wsgi + else: + raise RuntimeError( + "wsgi app should be either Application or " + "async function returning Application, got {}".format(self.wsgi) + ) + + if runner is None: + access_log = self.log.access_log if self.cfg.accesslog else None + runner = web.AppRunner( + app, + logger=self.log, + keepalive_timeout=self.cfg.keepalive, + access_log=access_log, + 
access_log_format=self._get_valid_log_format( + self.cfg.access_log_format + ), + ) + await runner.setup() + + ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None + + runner = runner + assert runner is not None + server = runner.server + assert server is not None + for sock in self.sockets: + site = web.SockSite( + runner, + sock, + ssl_context=ctx, + shutdown_timeout=self.cfg.graceful_timeout / 100 * 95, + ) + await site.start() + + # If our parent changed then we shut down. + pid = os.getpid() + try: + while self.alive: # type: ignore[has-type] + self.notify() + + cnt = server.requests_count + if self.cfg.max_requests and cnt > self.cfg.max_requests: + self.alive = False + self.log.info("Max requests, shutting down: %s", self) + + elif pid == os.getpid() and self.ppid != os.getppid(): + self.alive = False + self.log.info("Parent changed, shutting down: %s", self) + else: + await self._wait_next_notify() + except BaseException: + pass + + await runner.cleanup() + + def _wait_next_notify(self) -> "asyncio.Future[bool]": + self._notify_waiter_done() + + loop = self.loop + assert loop is not None + self._notify_waiter = waiter = loop.create_future() + self.loop.call_later(1.0, self._notify_waiter_done, waiter) + + return waiter + + def _notify_waiter_done( + self, waiter: Optional["asyncio.Future[bool]"] = None + ) -> None: + if waiter is None: + waiter = self._notify_waiter + if waiter is not None: + set_result(waiter, True) + + if waiter is self._notify_waiter: + self._notify_waiter = None + + def init_signals(self) -> None: + # Set up signals through the event loop API. + + self.loop.add_signal_handler( + signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None + ) + + self.loop.add_signal_handler( + signal.SIGTERM, self.handle_exit, signal.SIGTERM, None + ) + + self.loop.add_signal_handler( + signal.SIGINT, self.handle_quit, signal.SIGINT, None + ) + + self.loop.add_signal_handler( + signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None + ) + + self.loop.add_signal_handler( + signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None + ) + + self.loop.add_signal_handler( + signal.SIGABRT, self.handle_abort, signal.SIGABRT, None + ) + + # Don't let SIGTERM and SIGUSR1 disturb active requests + # by interrupting system calls + signal.siginterrupt(signal.SIGTERM, False) + signal.siginterrupt(signal.SIGUSR1, False) + # Reset signals so Gunicorn doesn't swallow subprocess return codes + # See: https://github.com/aio-libs/aiohttp/issues/6130 + if sys.version_info < (3, 8): + # Starting from Python 3.8, + # the default child watcher is ThreadedChildWatcher. + # The watcher doesn't depend on SIGCHLD signal, + # there is no need to reset it. + signal.signal(signal.SIGCHLD, signal.SIG_DFL) + + def handle_quit(self, sig: int, frame: FrameType) -> None: + self.alive = False + + # worker_int callback + self.cfg.worker_int(self) + + # wakeup closing process + self._notify_waiter_done() + + def handle_abort(self, sig: int, frame: FrameType) -> None: + self.alive = False + self.exit_code = 1 + self.cfg.worker_abort(self) + sys.exit(1) + + @staticmethod + def _create_ssl_context(cfg: Any) -> "SSLContext": + """Creates SSLContext instance for usage in asyncio.create_server. + + See ssl.SSLSocket.__init__ for more details. 
+ """ + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + + ctx = ssl.SSLContext(cfg.ssl_version) + ctx.load_cert_chain(cfg.certfile, cfg.keyfile) + ctx.verify_mode = cfg.cert_reqs + if cfg.ca_certs: + ctx.load_verify_locations(cfg.ca_certs) + if cfg.ciphers: + ctx.set_ciphers(cfg.ciphers) + return ctx + + def _get_valid_log_format(self, source_format: str) -> str: + if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT: + return self.DEFAULT_AIOHTTP_LOG_FORMAT + elif re.search(r"%\([^\)]+\)", source_format): + raise ValueError( + "Gunicorn's style options in form of `%(name)s` are not " + "supported for the log formatting. Please use aiohttp's " + "format specification to configure access log formatting: " + "http://docs.aiohttp.org/en/stable/logging.html" + "#format-specification" + ) + else: + return source_format + + +class GunicornUVLoopWebWorker(GunicornWebWorker): + def init_process(self) -> None: + import uvloop + + # Close any existing event loop before setting a + # new policy. + asyncio.get_event_loop().close() + + # Setup uvloop policy, so that every + # asyncio.get_event_loop() will create an instance + # of uvloop event loop. + asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) + + super().init_process() + + +class GunicornTokioWebWorker(GunicornWebWorker): + def init_process(self) -> None: # pragma: no cover + import tokio + + # Close any existing event loop before setting a + # new policy. + asyncio.get_event_loop().close() + + # Setup tokio policy, so that every + # asyncio.get_event_loop() will create an instance + # of tokio event loop. + asyncio.set_event_loop_policy(tokio.EventLoopPolicy()) + + super().init_process() diff --git a/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/LICENSE b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/LICENSE new file mode 100644 index 0000000..7082a2d --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2013-2019 Nikolay Kim and Andrew Svetlov + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
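The aiosignal package vendored in the next few files implements the freeze/send life-cycle that its METADATA describes below: callbacks are registered with ordinary list operations, the signal is frozen, and only then can it be fired. A minimal usage sketch (assumed usage, not code from this commit; the owner string and callback are illustrative only):

    import asyncio
    from aiosignal import Signal

    async def on_event(data):
        print("received:", data)

    async def main():
        sig = Signal(owner="demo")  # owner is only used in the repr
        sig.append(on_event)        # register callbacks with list operations
        sig.freeze()                # freezing forbids further mutation...
        await sig.send({"x": 1})    # ...and enables send()

    asyncio.run(main())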
diff --git a/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/METADATA new file mode 100644 index 0000000..fc96452 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/METADATA @@ -0,0 +1,128 @@ +Metadata-Version: 2.1 +Name: aiosignal +Version: 1.3.1 +Summary: aiosignal: a list of registered asynchronous callbacks +Home-page: https://github.com/aio-libs/aiosignal +Maintainer: aiohttp team +Maintainer-email: team@aiohttp.org +License: Apache 2.0 +Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby +Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiosignal/actions +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiosignal +Project-URL: Docs: RTD, https://docs.aiosignal.org +Project-URL: GitHub: issues, https://github.com/aio-libs/aiosignal/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/aiosignal +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Framework :: AsyncIO +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: frozenlist (>=1.1.0) + +========= +aiosignal +========= + +.. image:: https://github.com/aio-libs/aiosignal/workflows/CI/badge.svg + :target: https://github.com/aio-libs/aiosignal/actions?query=workflow%3ACI + :alt: GitHub status for master branch + +.. image:: https://codecov.io/gh/aio-libs/aiosignal/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/aiosignal + :alt: codecov.io status for master branch + +.. image:: https://badge.fury.io/py/aiosignal.svg + :target: https://pypi.org/project/aiosignal + :alt: Latest PyPI package version + +.. image:: https://readthedocs.org/projects/aiosignal/badge/?version=latest + :target: https://aiosignal.readthedocs.io/ + :alt: Latest Read The Docs + +.. image:: https://img.shields.io/discourse/topics?server=https%3A%2F%2Faio-libs.discourse.group%2F + :target: https://aio-libs.discourse.group/ + :alt: Discourse group for io-libs + +.. image:: https://badges.gitter.im/Join%20Chat.svg + :target: https://gitter.im/aio-libs/Lobby + :alt: Chat on Gitter + +Introduction +============ + +A project to manage callbacks in `asyncio` projects. + +``Signal`` is a list of registered asynchronous callbacks. + +The signal's life-cycle has two stages: after creation its content +could be filled by using standard list operations: ``sig.append()`` +etc. + +After you call ``sig.freeze()`` the signal is *frozen*: adding, removing +and dropping callbacks is forbidden. + +The only available operation is calling the previously registered +callbacks by using ``await sig.send(data)``. + +For concrete usage examples see the `Signals + +section of the `Web Server Advanced +` chapter of the `aiohttp +documentation`_. 
+ + +Installation +------------ + +:: + + $ pip install aiosignal + +The library requires Python 3.7 or newer. + + +Documentation +============= + +https://aiosignal.readthedocs.io/ + +Communication channels +====================== + +*gitter chat* https://gitter.im/aio-libs/Lobby + +Requirements +============ + +- Python >= 3.7 +- frozenlist >= 1.0.0 + +License +======= + +``aiosignal`` is offered under the Apache 2 license. + +Source code +=========== + +The project is hosted on GitHub_ + +Please file an issue in the `bug tracker +<https://github.com/aio-libs/aiosignal/issues>`_ if you have found a bug +or have some suggestions to improve the library. + +.. _GitHub: https://github.com/aio-libs/aiosignal +.. _aiohttp documentation: https://docs.aiohttp.org/ diff --git a/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/RECORD new file mode 100644 index 0000000..ea4440e --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/RECORD @@ -0,0 +1,10 @@ +aiosignal-1.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +aiosignal-1.3.1.dist-info/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332 +aiosignal-1.3.1.dist-info/METADATA,sha256=c0HRnlYzfXKztZPTFDlPfygizTherhG5WdwXlvco0Ug,4008 +aiosignal-1.3.1.dist-info/RECORD,, +aiosignal-1.3.1.dist-info/WHEEL,sha256=ZL1lC_LiPDNRgDnOl2taCMc83aPEUZgHHv2h-LDgdiM,92 +aiosignal-1.3.1.dist-info/top_level.txt,sha256=z45aNOKGDdrI1roqZY3BGXQ22kJFPHBmVdwtLYLtXC0,10 +aiosignal/__init__.py,sha256=zQNfFYRSd84bswvpFv8ZWjEr5DeYwV3LXbMSyo2222s,867 +aiosignal/__init__.pyi,sha256=xeCddYSS8fZAkz8S4HuKSR2IDe3N7RW_LKcXDPPA1Xk,311 +aiosignal/__pycache__/__init__.cpython-310.pyc,, +aiosignal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/WHEEL new file mode 100644 index 0000000..5e1f087 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/top_level.txt new file mode 100644 index 0000000..ac6df3a --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/top_level.txt @@ -0,0 +1 @@ +aiosignal diff --git a/venv/lib/python3.10/site-packages/aiosignal/__init__.py b/venv/lib/python3.10/site-packages/aiosignal/__init__.py new file mode 100644 index 0000000..3d288e6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiosignal/__init__.py @@ -0,0 +1,36 @@ +from frozenlist import FrozenList + +__version__ = "1.3.1" + +__all__ = ("Signal",) + + +class Signal(FrozenList): + """Coroutine-based signal implementation. + + To connect a callback to a signal, use any list method. + + Signals are fired using the send() coroutine, which takes named + arguments. + """ + + __slots__ = ("_owner",) + + def __init__(self, owner): + super().__init__() + self._owner = owner + + def __repr__(self): + return "<Signal owner={}, frozen={}, receivers={!r}>".format( + self._owner, self.frozen, list(self) + ) + + async def send(self, *args, **kwargs): + """ + Sends data to all registered receivers.
+ """ + if not self.frozen: + raise RuntimeError("Cannot send non-frozen signal.") + + for receiver in self: + await receiver(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.10/site-packages/aiosignal/__init__.pyi b/venv/lib/python3.10/site-packages/aiosignal/__init__.pyi new file mode 100644 index 0000000..d4e3416 --- /dev/null +++ b/venv/lib/python3.10/site-packages/aiosignal/__init__.pyi @@ -0,0 +1,12 @@ +from typing import Any, Generic, TypeVar + +from frozenlist import FrozenList + +__all__ = ("Signal",) + +_T = TypeVar("_T") + +class Signal(FrozenList[_T], Generic[_T]): + def __init__(self, owner: Any) -> None: ... + def __repr__(self) -> str: ... + async def send(self, *args: Any, **kwargs: Any) -> None: ... diff --git a/venv/lib/python3.10/site-packages/aiosignal/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/aiosignal/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..acc77ac Binary files /dev/null and b/venv/lib/python3.10/site-packages/aiosignal/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/aiosignal/py.typed b/venv/lib/python3.10/site-packages/aiosignal/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/LICENSE b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/LICENSE new file mode 100644 index 0000000..104eebf --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2018 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/METADATA new file mode 100644 index 0000000..24d611d --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/METADATA @@ -0,0 +1,106 @@ +Metadata-Version: 2.1 +Name: anyio +Version: 3.5.0 +Summary: High level compatibility layer for multiple asynchronous event loop implementations +Home-page: UNKNOWN +Author: Alex Grönholm +Author-email: alex.gronholm@nextday.fi +License: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Framework :: AnyIO +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.6.2 +License-File: LICENSE +Requires-Dist: idna (>=2.8) +Requires-Dist: sniffio (>=1.1) +Requires-Dist: contextvars ; python_version < "3.7" +Requires-Dist: dataclasses ; python_version < "3.7" +Requires-Dist: typing-extensions ; python_version < "3.8" +Provides-Extra: doc +Requires-Dist: packaging ; extra == 'doc' +Requires-Dist: sphinx-rtd-theme ; extra == 'doc' +Requires-Dist: sphinx-autodoc-typehints (>=1.2.0) ; extra == 'doc' +Provides-Extra: test +Requires-Dist: coverage[toml] (>=4.5) ; extra == 'test' +Requires-Dist: hypothesis (>=4.0) ; extra == 'test' +Requires-Dist: pytest (>=6.0) ; extra == 'test' +Requires-Dist: pytest-mock (>=3.6.1) ; extra == 'test' +Requires-Dist: trustme ; extra == 'test' +Requires-Dist: contextlib2 ; (python_version < "3.7") and extra == 'test' +Requires-Dist: uvloop (<0.15) ; (python_version < "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test' +Requires-Dist: mock (>=4) ; (python_version < "3.8") and extra == 'test' +Requires-Dist: uvloop (>=0.15) ; (python_version >= "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test' +Provides-Extra: trio +Requires-Dist: trio (>=0.16) ; extra == 'trio' + +.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/anyio/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio, and works in harmony +with the native SC of trio itself. 
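As a brief sketch of what that backend neutrality looks like in practice (worker and main are illustrative names; anyio.run() defaults to the asyncio backend and accepts a backend argument):

import anyio

async def worker(label: str, delay: float) -> None:
    await anyio.sleep(delay)
    print(f"{label} finished after {delay}s")

async def main() -> None:
    # A task group is AnyIO's structured-concurrency primitive
    # (a "nursery" in trio terms): the block does not exit until
    # every child task has finished or the group is cancelled.
    async with anyio.create_task_group() as tg:
        tg.start_soon(worker, "a", 0.1)
        tg.start_soon(worker, "b", 0.2)

anyio.run(main)                      # run on asyncio (the default)
# anyio.run(main, backend="trio")    # the same code on trio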
+ +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with native libraries of your chosen backend. + +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Asynchronous file I/O (using worker threads) +* Signal handling + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. _Hypothesis: https://hypothesis.works/ + + diff --git a/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/RECORD new file mode 100644 index 0000000..8bcc523 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/RECORD @@ -0,0 +1,82 @@ +anyio-3.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-3.5.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-3.5.0.dist-info/METADATA,sha256=qiAHEKm52YTAQjLLGrl1dHcWDN9wgUtRo_hbjC6N3og,4693 +anyio-3.5.0.dist-info/RECORD,, +anyio-3.5.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +anyio-3.5.0.dist-info/entry_points.txt,sha256=z1bvtbND76CfYuqdNZxiaibWP2IOqSVa8FQKIk4lVQk,40 +anyio-3.5.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=fxdj2SjxB6e-Q4lg_j3uDRK6MiZo-VXW4lVIV_YPpFk,3843 +anyio/__pycache__/__init__.cpython-310.pyc,, +anyio/__pycache__/from_thread.cpython-310.pyc,, +anyio/__pycache__/lowlevel.cpython-310.pyc,, +anyio/__pycache__/pytest_plugin.cpython-310.pyc,, +anyio/__pycache__/to_process.cpython-310.pyc,, +anyio/__pycache__/to_thread.cpython-310.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-310.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-310.pyc,, +anyio/_backends/__pycache__/_trio.cpython-310.pyc,, +anyio/_backends/_asyncio.py,sha256=l8780cOAPM2wbAfuPJQTTHyuKiX5efFydzSEQEHHiks,66395 +anyio/_backends/_trio.py,sha256=wu-9Sx53rUqVpyX2O7bUA4ElmkoHRbqBscRoP1xXAXU,27664 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-310.pyc,, +anyio/_core/__pycache__/_compat.cpython-310.pyc,, +anyio/_core/__pycache__/_eventloop.cpython-310.pyc,, +anyio/_core/__pycache__/_exceptions.cpython-310.pyc,, 
+anyio/_core/__pycache__/_fileio.cpython-310.pyc,, +anyio/_core/__pycache__/_resources.cpython-310.pyc,, +anyio/_core/__pycache__/_signals.cpython-310.pyc,, +anyio/_core/__pycache__/_sockets.cpython-310.pyc,, +anyio/_core/__pycache__/_streams.cpython-310.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-310.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-310.pyc,, +anyio/_core/__pycache__/_tasks.cpython-310.pyc,, +anyio/_core/__pycache__/_testing.cpython-310.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-310.pyc,, +anyio/_core/_compat.py,sha256=RM2vCmSviAmW0qFKuMaCGzn3JKD63UMNOIM6X6rGjdU,5668 +anyio/_core/_eventloop.py,sha256=3CrLCclhm1R_K_6wK3LP_Q7eEEqEwuIv7M3At1hhZkc,4055 +anyio/_core/_exceptions.py,sha256=CfFeKh4K25Z2X0lff2ahbVoBx1M3hxVagoOQwfRnEC8,2829 +anyio/_core/_fileio.py,sha256=9BAV2LZ-90cvmEQ1AuKhCNxqQeWCXr8qDqjZtHZ3aC4,18062 +anyio/_core/_resources.py,sha256=M_uN-90N8eSsWuvo-0xluWU_OG2BTyccAgsQ7XtHxzs,399 +anyio/_core/_signals.py,sha256=ub6LfvBz-z3O1qj8-WkWi46t_dpcPTefSfC27NBs-lU,820 +anyio/_core/_sockets.py,sha256=7S8UKPkfgcya0qXTkUisrnrR_lEPR56HbFbvB1ehz2Q,19784 +anyio/_core/_streams.py,sha256=GiNATCZCl2BDRCOXwPMV9Bonz7NbFSa4xK_IKUb5hWI,1483 +anyio/_core/_subprocesses.py,sha256=n6cX_LNh3gyo-lTDUi0pVV6hmr5Au3QkUfAVuVeHXpE,4869 +anyio/_core/_synchronization.py,sha256=nsrsv9ee7_sEUV6uncsfg_8sfc4nO-CbM13tUXRsE_Y,16720 +anyio/_core/_tasks.py,sha256=nhM5aEbdUjOdL3aCUonp3dy1zurl7OGFSsPg8OujEmE,5199 +anyio/_core/_testing.py,sha256=bp6n3_KFC68AhUAcu0XGq0aZRYSeQWP4FY3uWtsfc_8,2166 +anyio/_core/_typedattr.py,sha256=0hYrxkAFHCEBkcIC1-goHLd5bXth5VbNkCLTojvNbaM,2496 +anyio/abc/__init__.py,sha256=ugKefsiv5Y4DGLzEYsOrS-izkTO6UNM7v9dYpbLAknQ,1980 +anyio/abc/__pycache__/__init__.cpython-310.pyc,, +anyio/abc/__pycache__/_resources.cpython-310.pyc,, +anyio/abc/__pycache__/_sockets.cpython-310.pyc,, +anyio/abc/__pycache__/_streams.cpython-310.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-310.pyc,, +anyio/abc/__pycache__/_tasks.cpython-310.pyc,, +anyio/abc/__pycache__/_testing.cpython-310.pyc,, +anyio/abc/_resources.py,sha256=VC7Gzy8xwOGrPtfjNuSjGaKVXmBy0IS4sVpEwq2vZa0,761 +anyio/abc/_sockets.py,sha256=uFgijTGLAHbrfK8JA3arScbiN0o88bf0uUSlq4MjnEg,5605 +anyio/abc/_streams.py,sha256=h_EXlQsbpwt63gd2jSjaGBLprBfzG7vcSQYIZuDI5LY,6516 +anyio/abc/_subprocesses.py,sha256=iREP_YQ91it88lDU4XIcI3HZ9HUvV5UmjQk_sSPonrw,2071 +anyio/abc/_tasks.py,sha256=bcNfMaayFOrrlpPRklklK2GfIXGWgRaP-HUs35-J_18,3051 +anyio/abc/_testing.py,sha256=LfRDpPw4FQrja9dkhzV_RovBmV4sxqvzxHX5YrV6lYc,1147 +anyio/from_thread.py,sha256=6qdCL0PS6pbh3fdDPgR2uLucrAKcVDwCaZlB_DcPeNA,16042 +anyio/lowlevel.py,sha256=98x-Z9jKxEeuvZs7KFP15bZ6D-n-SlEzmxjRRqj1YlU,4612 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=bguloPM9UfdxIGlteWnctgT2PXbs1zFRdZ_JHtIGSJc,5544 +anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-310.pyc,, +anyio/streams/__pycache__/buffered.cpython-310.pyc,, +anyio/streams/__pycache__/file.cpython-310.pyc,, +anyio/streams/__pycache__/memory.cpython-310.pyc,, +anyio/streams/__pycache__/stapled.cpython-310.pyc,, +anyio/streams/__pycache__/text.cpython-310.pyc,, +anyio/streams/__pycache__/tls.cpython-310.pyc,, +anyio/streams/buffered.py,sha256=32jQEEkqefrmPgAXKAQoGnNSdm5l0zzaa0V_nYkwpbM,4435 +anyio/streams/file.py,sha256=HT-u90tt-zNwlRlZhKSpFhKrWEKd4QkLPR4ySF9FfUs,4345 +anyio/streams/memory.py,sha256=4qzW3_N69w-AdixRZOkCemF6veRBcV6-2IRjL63BXA8,9161 
+anyio/streams/stapled.py,sha256=euIt3fnuvs3rE7Xn5QsDYhebP5neXAoyCVcAPcM6vpE,4168 +anyio/streams/text.py,sha256=iTrT7auMl2SGvFxGf-UA0DJAdTx2ZOW663q1ucMihzs,4966 +anyio/streams/tls.py,sha256=_DSW8p4l8xh5DR4tCi_8QS83wptgMcHJ_JSzPXdNPLE,11778 +anyio/to_process.py,sha256=tXGfHyGokeVERftxEU5AvygQS8OoOdPIFXTs8a_5lRw,9020 +anyio/to_thread.py,sha256=f-SIvh1-VSg78_R5k6JfP7sXJ5epx3eBa3cDPh1s8lk,2139 diff --git a/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/entry_points.txt new file mode 100644 index 0000000..1740df0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[pytest11] +anyio = anyio.pytest_plugin + diff --git a/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/top_level.txt new file mode 100644 index 0000000..c77c069 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio-3.5.0.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/venv/lib/python3.10/site-packages/anyio/__init__.py b/venv/lib/python3.10/site-packages/anyio/__init__.py new file mode 100644 index 0000000..974e8c2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/__init__.py @@ -0,0 +1,116 @@ +__all__ = ( + 'maybe_async', + 'maybe_async_cm', + 'run', + 'sleep', + 'sleep_forever', + 'sleep_until', + 'current_time', + 'get_all_backends', + 'get_cancelled_exc_class', + 'BrokenResourceError', + 'BrokenWorkerProcess', + 'BusyResourceError', + 'ClosedResourceError', + 'DelimiterNotFound', + 'EndOfStream', + 'ExceptionGroup', + 'IncompleteRead', + 'TypedAttributeLookupError', + 'WouldBlock', + 'AsyncFile', + 'Path', + 'open_file', + 'wrap_file', + 'aclose_forcefully', + 'open_signal_receiver', + 'connect_tcp', + 'connect_unix', + 'create_tcp_listener', + 'create_unix_listener', + 'create_udp_socket', + 'create_connected_udp_socket', + 'getaddrinfo', + 'getnameinfo', + 'wait_socket_readable', + 'wait_socket_writable', + 'create_memory_object_stream', + 'run_process', + 'open_process', + 'create_lock', + 'CapacityLimiter', + 'CapacityLimiterStatistics', + 'Condition', + 'ConditionStatistics', + 'Event', + 'EventStatistics', + 'Lock', + 'LockStatistics', + 'Semaphore', + 'SemaphoreStatistics', + 'create_condition', + 'create_event', + 'create_semaphore', + 'create_capacity_limiter', + 'open_cancel_scope', + 'fail_after', + 'move_on_after', + 'current_effective_deadline', + 'TASK_STATUS_IGNORED', + 'CancelScope', + 'create_task_group', + 'TaskInfo', + 'get_current_task', + 'get_running_tasks', + 'wait_all_tasks_blocked', + 'run_sync_in_worker_thread', + 'run_async_from_thread', + 'run_sync_from_thread', + 'current_default_worker_thread_limiter', + 'create_blocking_portal', + 'start_blocking_portal', + 'typed_attribute', + 'TypedAttributeSet', + 'TypedAttributeProvider' +) + +from typing import Any + +from ._core._compat import maybe_async, maybe_async_cm +from ._core._eventloop import ( + current_time, get_all_backends, get_cancelled_exc_class, run, sleep, sleep_forever, + sleep_until) +from ._core._exceptions import ( + 
BrokenResourceError, BrokenWorkerProcess, BusyResourceError, ClosedResourceError, + DelimiterNotFound, EndOfStream, ExceptionGroup, IncompleteRead, TypedAttributeLookupError, + WouldBlock) +from ._core._fileio import AsyncFile, Path, open_file, wrap_file +from ._core._resources import aclose_forcefully +from ._core._signals import open_signal_receiver +from ._core._sockets import ( + connect_tcp, connect_unix, create_connected_udp_socket, create_tcp_listener, create_udp_socket, + create_unix_listener, getaddrinfo, getnameinfo, wait_socket_readable, wait_socket_writable) +from ._core._streams import create_memory_object_stream +from ._core._subprocesses import open_process, run_process +from ._core._synchronization import ( + CapacityLimiter, CapacityLimiterStatistics, Condition, ConditionStatistics, Event, + EventStatistics, Lock, LockStatistics, Semaphore, SemaphoreStatistics, create_capacity_limiter, + create_condition, create_event, create_lock, create_semaphore) +from ._core._tasks import ( + TASK_STATUS_IGNORED, CancelScope, create_task_group, current_effective_deadline, fail_after, + move_on_after, open_cancel_scope) +from ._core._testing import TaskInfo, get_current_task, get_running_tasks, wait_all_tasks_blocked +from ._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute + +# Re-exported here, for backwards compatibility +# isort: off +from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread +from .from_thread import ( + create_blocking_portal, run_async_from_thread, run_sync_from_thread, start_blocking_portal) + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, '__module__', '').startswith('anyio.'): + value.__module__ = __name__ diff --git a/venv/lib/python3.10/site-packages/anyio/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..4c77c79 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/__pycache__/from_thread.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/__pycache__/from_thread.cpython-310.pyc new file mode 100644 index 0000000..0a1e615 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/__pycache__/from_thread.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/__pycache__/lowlevel.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/__pycache__/lowlevel.cpython-310.pyc new file mode 100644 index 0000000..453452e Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/__pycache__/lowlevel.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/__pycache__/pytest_plugin.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/__pycache__/pytest_plugin.cpython-310.pyc new file mode 100644 index 0000000..cb4c8f8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/__pycache__/pytest_plugin.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/__pycache__/to_process.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/__pycache__/to_process.cpython-310.pyc new file mode 100644 index 0000000..5187cfe Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/__pycache__/to_process.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/anyio/__pycache__/to_thread.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/__pycache__/to_thread.cpython-310.pyc new file mode 100644 index 0000000..c0c52b6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/__pycache__/to_thread.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_backends/__init__.py b/venv/lib/python3.10/site-packages/anyio/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..c69c673 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-310.pyc new file mode 100644 index 0000000..d0ebb35 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/_trio.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/_trio.cpython-310.pyc new file mode 100644 index 0000000..04aac0d Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/_trio.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py b/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py new file mode 100644 index 0000000..f2929b4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py @@ -0,0 +1,1924 @@ +import array +import asyncio +import concurrent.futures +import math +import socket +import sys +from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] +from collections import OrderedDict, deque +from concurrent.futures import Future +from contextvars import Context, copy_context +from dataclasses import dataclass +from functools import partial, wraps +from inspect import ( + CORO_RUNNING, CORO_SUSPENDED, GEN_RUNNING, GEN_SUSPENDED, getcoroutinestate, getgeneratorstate) +from io import IOBase +from os import PathLike +from queue import Queue +from socket import AddressFamily, SocketKind +from threading import Thread +from types import TracebackType +from typing import ( + Any, Awaitable, Callable, Collection, Coroutine, Deque, Dict, Generator, Iterable, List, + Mapping, Optional, Sequence, Set, Tuple, Type, TypeVar, Union, cast) +from weakref import WeakKeyDictionary + +import sniffio + +from .. 
import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable +from .._core._eventloop import claim_worker_thread, threadlocals +from .._core._exceptions import ( + BrokenResourceError, BusyResourceError, ClosedResourceError, EndOfStream) +from .._core._exceptions import ExceptionGroup as BaseExceptionGroup +from .._core._exceptions import WouldBlock +from .._core._sockets import GetAddrInfoReturnType, convert_ipv6_sockaddr +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType +from ..lowlevel import RunVar + +if sys.version_info >= (3, 8): + get_coro = asyncio.Task.get_coro +else: + def get_coro(task: asyncio.Task) -> Union[Generator, Awaitable[Any]]: + return task._coro + +if sys.version_info >= (3, 7): + from asyncio import all_tasks, create_task, current_task, get_running_loop + from asyncio import run as native_run + + def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: + return [cb for cb, context in task._callbacks] # type: ignore[attr-defined] +else: + _T = TypeVar('_T') + + def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: + return task._callbacks + + def native_run(main, *, debug=False): + # Snatched from Python 3.7 + from asyncio import coroutines, events, tasks + + def _cancel_all_tasks(loop): + to_cancel = all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete( + tasks.gather(*to_cancel, loop=loop, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler({ + 'message': 'unhandled exception during asyncio.run() shutdown', + 'exception': task.exception(), + 'task': task, + }) + + if events._get_running_loop() is not None: + raise RuntimeError( + "asyncio.run() cannot be called from a running event loop") + + if not coroutines.iscoroutine(main): + raise ValueError(f"a coroutine was expected, got {main!r}") + + loop = events.new_event_loop() + try: + events.set_event_loop(loop) + loop.set_debug(debug) + return loop.run_until_complete(main) + finally: + try: + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + finally: + events.set_event_loop(None) + loop.close() + + def create_task(coro: Union[Generator[Any, None, _T], Awaitable[_T]], *, + name: object = None) -> asyncio.Task: + return get_running_loop().create_task(coro) + + def get_running_loop() -> asyncio.AbstractEventLoop: + loop = asyncio._get_running_loop() + if loop is not None: + return loop + else: + raise RuntimeError('no running event loop') + + def all_tasks(loop: Optional[asyncio.AbstractEventLoop] = None) -> Set[asyncio.Task]: + """Return a set of all tasks for the loop.""" + from asyncio import Task + + if loop is None: + loop = get_running_loop() + + return {t for t in Task.all_tasks(loop) if not t.done()} + + def current_task(loop: Optional[asyncio.AbstractEventLoop] = None) -> Optional[asyncio.Task]: + if loop is None: + loop = get_running_loop() + + return asyncio.Task.current_task(loop) + +T_Retval = TypeVar('T_Retval') + +# Check whether there is native support for task names in asyncio (3.8+) +_native_task_names = hasattr(asyncio.Task, 'get_name') + + +_root_task: 
RunVar[Optional[asyncio.Task]] = RunVar('_root_task') + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + for cb in _get_task_callbacks(task): + if (cb is _run_until_complete_cb + or getattr(cb, '__module__', None) == 'uvloop.loop'): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = _task_states.get(task) + if state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, '__module__', None) + qualname = getattr(func, '__qualname__', None) + return '.'.join([x for x in (module, qualname) if x]) + + +# +# Event loop +# + +_run_vars = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] + +current_token = get_running_loop + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + coro = get_coro(task) + try: + return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + try: + return getgeneratorstate(cast(Generator, coro)) in (GEN_RUNNING, GEN_SUSPENDED) + except AttributeError: + # task coro is async_generator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") + + +def _maybe_set_event_loop_policy(policy: Optional[asyncio.AbstractEventLoopPolicy], + use_uvloop: bool) -> None: + # On CPython, use uvloop when possible if no other policy has been given and if not + # explicitly disabled + if policy is None and use_uvloop and sys.implementation.name == 'cpython': + try: + import uvloop + except ImportError: + pass + else: + # Test for missing shutdown_default_executor() (uvloop 0.14.0 and earlier) + if (not hasattr(asyncio.AbstractEventLoop, 'shutdown_default_executor') + or hasattr(uvloop.loop.Loop, 'shutdown_default_executor')): + policy = uvloop.EventLoopPolicy() + + if policy is not None: + asyncio.set_event_loop_policy(policy) + + +def run(func: Callable[..., Awaitable[T_Retval]], *args: object, + debug: bool = False, use_uvloop: bool = False, + policy: Optional[asyncio.AbstractEventLoopPolicy] = None) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task_state = TaskState(None, get_callable_name(func), None) + _task_states[task] = task_state + if _native_task_names: + task.set_name(task_state.name) + + try: + return await func(*args) + finally: + del _task_states[task] + + _maybe_set_event_loop_policy(policy, use_uvloop) + return native_run(wrapper(), debug=debug) + + +# +# Miscellaneous +# + +sleep = asyncio.sleep + + +# +# Timeouts and cancellation +# + +CancelledError = asyncio.CancelledError + + +class CancelScope(BaseCancelScope): + def __new__(cls, *, deadline: float = math.inf, shield: bool = False) -> "CancelScope": + return object.__new__(cls) + + def __init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: Optional[CancelScope] = None +
self._cancel_called = False + self._active = False + self._timeout_handle: Optional[asyncio.TimerHandle] = None + self._cancel_handle: Optional[asyncio.Handle] = None + self._tasks: Set[asyncio.Task] = set() + self._host_task: Optional[asyncio.Task] = None + self._timeout_expired = False + + def __enter__(self) -> "CancelScope": + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_name = host_task.get_name() if _native_task_names else None + task_state = TaskState(None, task_name, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + + self._timeout() + self._active = True + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + if not self._active: + raise RuntimeError('This cancel scope is not active') + if current_task() is not self._host_task: + raise RuntimeError('Attempted to exit cancel scope in a different task than it was ' + 'entered in') + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError("Attempted to exit a cancel scope that isn't the current task's " + "current cancel scope") + + self._active = False + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the farthest directly cancelled parent scope if this + # one was shielded + if self._shield: + self._deliver_cancellation_to_parent() + + if exc_val is not None: + exceptions = exc_val.exceptions if isinstance(exc_val, ExceptionGroup) else [exc_val] + if all(isinstance(exc, CancelledError) for exc in exceptions): + if self._timeout_expired: + return True + elif not self._cancel_called: + # Task was cancelled natively + return None + elif not self._parent_cancelled(): + # This scope was directly cancelled + return True + + return None + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self._timeout_expired = True + self.cancel() + else: + self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def _deliver_cancellation(self) -> None: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for cancellation.
+ """ + should_retry = False + current = current_task() + for task in self._tasks: + if task._must_cancel: # type: ignore[attr-defined] + continue + + # The task is eligible for cancellation if it has started and is not in a cancel + # scope shielded from this one + cancel_scope = _task_states[task].cancel_scope + while cancel_scope is not self: + if cancel_scope is None or cancel_scope._shield: + break + else: + cancel_scope = cancel_scope._parent_scope + else: + should_retry = True + if task is not current and (task is self._host_task or _task_started(task)): + task.cancel() + + # Schedule another callback if there are still tasks left + if should_retry: + self._cancel_handle = get_running_loop().call_soon(self._deliver_cancellation) + else: + self._cancel_handle = None + + def _deliver_cancellation_to_parent(self) -> None: + """Start cancellation effort in the farthest directly cancelled parent scope""" + scope = self._parent_scope + scope_to_cancel: Optional[CancelScope] = None + while scope is not None: + if scope._cancel_called and scope._cancel_handle is None: + scope_to_cancel = scope + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + if scope_to_cancel is not None: + scope_to_cancel._deliver_cancellation() + + def _parent_cancelled(self) -> bool: + # Check whether any parent has been cancelled + cancel_scope = self._parent_scope + while cancel_scope is not None and not cancel_scope._shield: + if cancel_scope._cancel_called: + return True + else: + cancel_scope = cancel_scope._parent_scope + + return False + + def cancel(self) -> DeprecatedAwaitable: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called = True + self._deliver_cancellation() + + return DeprecatedAwaitable(self.cancel) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._deliver_cancellation_to_parent() + + +async def checkpoint() -> None: + await sleep(0) + + +async def checkpoint_if_cancelled() -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + +async def cancel_shielded_checkpoint() -> None: + with CancelScope(shield=True): + await sleep(0) + + +def current_effective_deadline() -> float: + try: + cancel_scope = _task_states[current_task()].cancel_scope # type: ignore[index] + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + +def current_time() -> float: + return get_running_loop().time() + + +# +# Task states +# + +class TaskState: + """ + Encapsulates auxiliary task 
information that cannot be added to the Task instance itself + because there are no guarantees about its implementation. + """ + + __slots__ = 'parent_id', 'name', 'cancel_scope' + + def __init__(self, parent_id: Optional[int], name: Optional[str], + cancel_scope: Optional[CancelScope]): + self.parent_id = parent_id + self.name = name + self.cancel_scope = cancel_scope + + +_task_states = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.Task, TaskState] + + +# +# Task groups +# + +class ExceptionGroup(BaseExceptionGroup): + def __init__(self, exceptions: List[BaseException]): + super().__init__() + self.exceptions = exceptions + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future, parent_id: int): + self._future = future + self._parent_id = parent_id + + def started(self, value: object = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + raise RuntimeError("called 'started' twice on the same task status") from None + + task = cast(asyncio.Task, current_task()) + _task_states[task].parent_id = self._parent_id + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: List[BaseException] = [] + + async def __aenter__(self) -> "TaskGroup": + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + if exc_val is not None: + self.cancel_scope.cancel() + self._exceptions.append(exc_val) + + while self.cancel_scope._tasks: + try: + await asyncio.wait(self.cancel_scope._tasks) + except asyncio.CancelledError: + self.cancel_scope.cancel() + + self._active = False + if not self.cancel_scope._parent_cancelled(): + exceptions = self._filter_cancellation_errors(self._exceptions) + else: + exceptions = self._exceptions + + try: + if len(exceptions) > 1: + if all(isinstance(e, CancelledError) and not e.args for e in exceptions): + # Tasks were cancelled natively, without a cancellation message + raise CancelledError + else: + raise ExceptionGroup(exceptions) + elif exceptions and exceptions[0] is not exc_val: + raise exceptions[0] + except BaseException as exc: + # Clear the context here, as it can only be done in-flight. + # If the context is not cleared, it can result in recursive tracebacks (see #145). 
+ exc.__context__ = None + raise + + return ignore_exception + + @staticmethod + def _filter_cancellation_errors(exceptions: Sequence[BaseException]) -> List[BaseException]: + filtered_exceptions: List[BaseException] = [] + for exc in exceptions: + if isinstance(exc, ExceptionGroup): + new_exceptions = TaskGroup._filter_cancellation_errors(exc.exceptions) + if len(new_exceptions) > 1: + filtered_exceptions.append(exc) + elif len(new_exceptions) == 1: + filtered_exceptions.append(new_exceptions[0]) + elif new_exceptions: + new_exc = ExceptionGroup(new_exceptions) + new_exc.__cause__ = exc.__cause__ + new_exc.__context__ = exc.__context__ + new_exc.__traceback__ = exc.__traceback__ + filtered_exceptions.append(new_exc) + elif not isinstance(exc, CancelledError) or exc.args: + filtered_exceptions.append(exc) + + return filtered_exceptions + + async def _run_wrapped_task( + self, coro: Coroutine, task_status_future: Optional[asyncio.Future]) -> None: + # This is the code path for Python 3.6 and 3.7 on which asyncio freaks out if a task raises + # a BaseException. + __traceback_hide__ = __tracebackhide__ = True # noqa: F841 + task = cast(asyncio.Task, current_task()) + try: + await coro + except BaseException as exc: + if task_status_future is None or task_status_future.done(): + self._exceptions.append(exc) + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + else: + if task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError('Child exited without calling task_status.started()')) + finally: + if task in self.cancel_scope._tasks: + self.cancel_scope._tasks.remove(task) + del _task_states[task] + + def _spawn(self, func: Callable[..., Coroutine], args: tuple, name: object, + task_status_future: Optional[asyncio.Future] = None) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + # This is the code path for Python 3.8+ + assert _task in self.cancel_scope._tasks + self.cancel_scope._tasks.remove(_task) + del _task_states[_task] + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + if task_status_future is None or task_status_future.done(): + self._exceptions.append(exc) + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError('Child exited without calling task_status.started()')) + + if not self._active: + raise RuntimeError('This task group is not active; no new tasks can be started.') + + options = {} + name = get_callable_name(func) if name is None else str(name) + if _native_task_names: + options['name'] = name + + kwargs = {} + if task_status_future: + parent_id = id(current_task()) + kwargs['task_status'] = _AsyncioTaskStatus(task_status_future, + id(self.cancel_scope._host_task)) + else: + parent_id = id(self.cancel_scope._host_task) + + coro = func(*args, **kwargs) + if not asyncio.iscoroutine(coro): + raise TypeError(f'Expected an async function, but {func} appears to be synchronous') + + foreign_coro = not hasattr(coro, 'cr_frame') and not hasattr(coro, 'gi_frame') + if foreign_coro or sys.version_info < (3, 8): + coro = self._run_wrapped_task(coro, task_status_future) + + task = create_task(coro, **options) + if not foreign_coro and sys.version_info >= (3, 8): + task.add_done_callback(task_done) + + # Make the spawned task inherit 
the task group's cancel scope + _task_states[task] = TaskState(parent_id=parent_id, name=name, + cancel_scope=self.cancel_scope) + self.cancel_scope._tasks.add(task) + return task + + def start_soon(self, func: Callable[..., Coroutine], *args: object, + name: object = None) -> None: + self._spawn(func, args, name) + + async def start(self, func: Callable[..., Coroutine], *args: object, + name: object = None) -> None: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch point + # between, the task group is cancelled and this method never proceeds to process the + # completed future. That's why we have to have a shielded cancel scope here. + with CancelScope(shield=True): + try: + return await future + except CancelledError: + task.cancel() + raise + + +# +# Threads +# + +_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__(self, root_task: asyncio.Task, workers: Set['WorkerThread'], + idle_workers: Deque['WorkerThread']): + super().__init__(name='AnyIO worker thread') + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[Union[Tuple[Context, Callable, tuple, asyncio.Future], None]] = Queue(2) + self.idle_since = current_time() + self.stopping = False + + def _report_result(self, future: asyncio.Future, result: Any, + exc: Optional[BaseException]) -> None: + self.idle_since = current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread('asyncio'): + threadlocals.loop = self.loop + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + context, func, args, future = item + if not future.cancelled(): + result = None + exception: Optional[BaseException] = None + try: + result = context.run(func, *args) + except BaseException as exc: + exception = exc + + if not self.loop.is_closed(): + self.loop.call_soon_threadsafe( + self._report_result, future, result, exception) + + self.queue.task_done() + + def stop(self, f: Optional[asyncio.Task] = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[Deque[WorkerThread]] = RunVar('_threadpool_idle_workers') +_threadpool_workers: RunVar[Set[WorkerThread]] = RunVar('_threadpool_workers') + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional['CapacityLimiter'] = None) -> T_Retval: + await checkpoint() + + # If this is the first run in this event loop thread, set up the necessary variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with (limiter or current_default_thread_limiter()): + with CancelScope(shield=not cancellable): + future: asyncio.Future = asyncio.Future() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + 
workers.add(worker) + root_task.add_done_callback(worker.stop) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME seconds or longer + now = current_time() + while idle_workers: + if now - idle_workers[0].idle_since < WorkerThread.MAX_IDLE_TIME: + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback(expired_worker.stop) + expired_worker.stop() + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + worker.queue.put_nowait((context, func, args, future)) + return await future + + +def run_sync_from_thread(func: Callable[..., T_Retval], *args: object, + loop: Optional[asyncio.AbstractEventLoop] = None) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + f: concurrent.futures.Future[T_Retval] = Future() + loop = loop or threadlocals.loop + if sys.version_info < (3, 7): + loop.call_soon_threadsafe(copy_context().run, wrapper) + else: + loop.call_soon_threadsafe(wrapper) + + return f.result() + + +def run_async_from_thread( + func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object +) -> T_Retval: + f: concurrent.futures.Future[T_Retval] = asyncio.run_coroutine_threadsafe( + func(*args), threadlocals.loop) + return f.result() + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> "BlockingPortal": + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._loop = get_running_loop() + + def _spawn_task_from_thread(self, func: Callable, args: tuple, kwargs: Dict[str, Any], + name: object, future: Future) -> None: + run_sync_from_thread( + partial(self._task_group.start_soon, name=name), self._call_func, func, args, kwargs, + future, loop=self._loop) + + +# +# Subprocesses +# + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.feed_eof() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + + async def send(self, item: bytes) -> None: + self._stream.write(item) + await self._stream.drain() + + async def aclose(self) -> None: + self._stream.close() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: asyncio.subprocess.Process + _stdin: Optional[StreamWriterWrapper] + _stdout: Optional[StreamReaderWrapper] + _stderr: Optional[StreamReaderWrapper] + + async def aclose(self) -> None: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + await self.wait() + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> Optional[int]: + return self._process.returncode + + @property + def stdin(self) -> Optional[abc.ByteSendStream]: + return self._stdin + + @property + def stdout(self) -> Optional[abc.ByteReceiveStream]: + return self._stdout + + @property + def 
stderr(self) -> Optional[abc.ByteReceiveStream]: + return self._stderr + + +async def open_process(command: Union[str, Sequence[str]], *, shell: bool, + stdin: int, stdout: int, stderr: int, + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False) -> Process: + await checkpoint() + if shell: + process = await asyncio.create_subprocess_shell( + command, stdin=stdin, stdout=stdout, # type: ignore[arg-type] + stderr=stderr, cwd=cwd, env=env, start_new_session=start_new_session, + ) + else: + process = await asyncio.create_subprocess_exec(*command, stdin=stdin, stdout=stdout, + stderr=stderr, cwd=cwd, env=env, + start_new_session=start_new_session) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + +def _forcibly_shutdown_process_pool_on_exit(workers: Set[Process], _task: object) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: Optional[asyncio.AbstractChildWatcher] + try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + child_watcher = None + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers: + if process.returncode is None: + continue + + process._stdin._stream._transport.close() # type: ignore[union-attr] + process._stdout._stream._transport.close() # type: ignore[union-attr] + process._stderr._stream._transport.close() # type: ignore[union-attr] + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: Set[Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. + + NOTE: this only works when the event loop was started using asyncio.run() or anyio.run(). 
+ + """ + process: Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None: + kwargs = {'name': 'AnyIO process pool shutdown task'} if _native_task_names else {} + create_task(_shutdown_process_pool_on_exit(workers), **kwargs) + find_root_task().add_done_callback(partial(_forcibly_shutdown_process_pool_on_exit, workers)) + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: Deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Optional[Exception] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Optional[Exception]) -> None: + if exc: + self.exception = BrokenResourceError() + self.exception.__cause__ = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + self.read_queue.append(data) + self.read_event.set() + + def eof_received(self) -> Optional[bool]: + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: Deque[Tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Optional[Exception] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + + def connection_lost(self, exc: Optional[Exception]) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc + + def pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info('socket') + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + await checkpoint() + + if not self._protocol.read_event.is_set() and not self._transport.is_closing(): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise self._protocol.exception + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] + 
self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will block until + # data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + except OSError: + pass + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class UNIXSocketStream(abc.SocketStream): + _receive_future: Optional[asyncio.Future] = None + _send_future: Optional[asyncio.Future] = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + self._loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await checkpoint() + with self._receive_guard: + while True: + try: + data = self.__raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await checkpoint() + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = self.__raw_socket.send(view)  # partial sends are possible; the unsent remainder is kept in the view + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError('msglen must be a non-negative integer') + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError('maxfds must be a positive integer') + + loop = get_running_loop() + fds = array.array("i") + await checkpoint() + with self._receive_guard: + 
while True: + try: + message, ancdata, flags, addr = self.__raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize)) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError(f'Received unexpected ancillary data; message = {message!r}, ' + f'cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}') + + fds.frombytes(cmsg_data[:len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[Union[int, IOBase]]) -> None: + if not message: + raise ValueError('message must not be empty') + if not fds: + raise ValueError('fds must not be empty') + + loop = get_running_loop() + filenos: List[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self.__raw_socket.sendmsg( + [message], + [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: Optional[CancelScope] = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) + self._accept_guard = ResourceGuard('accepting connections from') + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + await checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket(StreamProtocol, client_sock) + return SocketStream(cast(asyncio.Transport, transport), cast(StreamProtocol, protocol)) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class 
UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard('accepting connections from') + self._closed = False + + async def accept(self) -> abc.SocketStream: + await checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + client_sock.setblocking(False) + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(lambda _: self._loop.remove_reader(self.__raw_socket)) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__(self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info('socket') + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> Tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__(self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info('socket') + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> bytes: + with self._receive_guard: + await checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif 
self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +async def connect_tcp(host: str, port: int, + local_addr: Optional[Tuple[str, int]] = None) -> SocketStream: + transport, protocol = cast( + Tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection(StreamProtocol, host, port, + local_addr=local_addr) + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + +async def connect_unix(path: str) -> UNIXSocketStream: + await checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + +async def create_udp_socket( + family: socket.AddressFamily, + local_address: Optional[IPSockAddrType], + remote_address: Optional[IPSockAddrType], + reuse_port: bool +) -> Union[UDPSocket, ConnectedUDPSocket]: + result = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, local_addr=local_address, remote_addr=remote_address, family=family, + reuse_port=reuse_port) + transport = cast(asyncio.DatagramTransport, result[0]) + protocol = cast(DatagramProtocol, result[1]) + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + +async def getaddrinfo(host: Union[bytearray, bytes, str], port: Union[str, int, None], *, + family: Union[int, AddressFamily] = 0, type: Union[int, SocketKind] = 0, + proto: int = 0, flags: int = 0) -> GetAddrInfoReturnType: + # https://github.com/python/typeshed/pull/4304 + result = await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags) # type: ignore[arg-type] + return cast(GetAddrInfoReturnType, result) + + +async def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + +_read_events: RunVar[Dict[Any, asyncio.Event]] = RunVar('read_events') +_write_events: RunVar[Dict[Any, asyncio.Event]] = RunVar('write_events') + + +async def wait_socket_readable(sock: socket.socket) -> None: + await checkpoint() + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + if read_events.get(sock): + raise BusyResourceError('reading from') from None + + loop = get_running_loop() + event = read_events[sock] = asyncio.Event() + loop.add_reader(sock, event.set) + try: + await event.wait() + finally: + if read_events.pop(sock, None) is not None: + loop.remove_reader(sock) + readable = True + else: + readable = False + + if not readable: + raise ClosedResourceError + + +async def wait_socket_writable(sock: socket.socket) -> None: + await checkpoint() + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + if write_events.get(sock): + raise BusyResourceError('writing to') from None + + loop = get_running_loop() + event = write_events[sock] = asyncio.Event() + loop.add_writer(sock.fileno(), event.set) + try: + await event.wait() + finally: + if write_events.pop(sock, None) is not None: + 
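wait_socket_readable()/wait_socket_writable() above register a one-shot reader/writer callback on the loop and raise BusyResourceError if a second task waits on the same socket. They surface as anyio.wait_socket_readable() and anyio.wait_socket_writable(); a sketch of the intended calling pattern with a plain non-blocking socket (the buffer size is arbitrary, and the socket is assumed connected):

    import socket

    import anyio

    async def recv_when_ready(sock: socket.socket) -> bytes:
        sock.setblocking(False)
        while True:
            try:
                return sock.recv(4096)
            except BlockingIOError:
                # parks this task until the loop reports the fd readable
                await anyio.wait_socket_readable(sock)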
loop.remove_writer(sock) + writable = True + else: + writable = False + + if not writable: + raise ClosedResourceError + + +# +# Synchronization +# + +class Event(BaseEvent): + def __new__(cls) -> "Event": + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> DeprecatedAwaitable: + self._event.set() + return DeprecatedAwaitable(self.set) + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if await self._event.wait(): + await checkpoint() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) # type: ignore[attr-defined] + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> "CapacityLimiter": + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: Set[Any] = set() + self._wait_queue: Dict[Any, asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError('total_tokens must be an int or math.inf') + if value < 1: + raise ValueError('total_tokens must be >= 1') + + old_value = self._total_tokens + self._total_tokens = value + events = [] + for event in self._wait_queue.values(): + if value <= old_value: + break + + if not event.is_set(): + events.append(event) + old_value += 1 + + for event in events: + event.set() + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def available_tokens(self) -> float: + return self._total_tokens - len(self._borrowers) + + def acquire_nowait(self) -> DeprecatedAwaitable: + self.acquire_on_behalf_of_nowait(current_task()) + return DeprecatedAwaitable(self.acquire_nowait) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + if borrower in self._borrowers: + raise RuntimeError("this borrower is already holding one of this CapacityLimiter's " + "tokens") + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + raise + + self._borrowers.add(borrower) + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError("this borrower isn't holding any of this CapacityLimiter's " + "tokens") from None + + # Notify the next task in line if this limiter has free capacity now + if self._wait_queue and 
len(self._borrowers) < self._total_tokens: + event = self._wait_queue.popitem()[1] + event.set() + + def statistics(self) -> CapacityLimiterStatistics: + return CapacityLimiterStatistics(self.borrowed_tokens, self.total_tokens, + tuple(self._borrowers), len(self._wait_queue)) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar('_default_thread_limiter') + + +def current_default_thread_limiter() -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + +# +# Operating system signals +# + +class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): + def __init__(self, signals: Tuple[int, ...]): + self._signals = signals + self._loop = get_running_loop() + self._signal_queue: Deque[int] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: Set[int] = set() + + def _deliver(self, signum: int) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> "_SignalReceiver": + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + return None + + def __aiter__(self) -> "_SignalReceiver": + return self + + async def __anext__(self) -> int: + await checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +def open_signal_receiver(*signals: int) -> _SignalReceiver: + return _SignalReceiver(signals) + + +# +# Testing and debugging +# + +def _create_task_info(task: asyncio.Task) -> TaskInfo: + task_state = _task_states.get(task) + if task_state is None: + name = task.get_name() if _native_task_names else None + parent_id = None + else: + name = task_state.name + parent_id = task_state.parent_id + + return TaskInfo(id(task), parent_id, name, get_coro(task)) + + +def get_current_task() -> TaskInfo: + return _create_task_info(current_task()) # type: ignore[arg-type] + + +def get_running_tasks() -> List[TaskInfo]: + return [_create_task_info(task) for task in all_tasks() if not task.done()] + + +async def wait_all_tasks_blocked() -> None: + await checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + if task._fut_waiter is None or task._fut_waiter.done(): # type: ignore[attr-defined] + await sleep(0.1) + break + else: + return + + +class TestRunner(abc.TestRunner): + def __init__(self, debug: bool = False, use_uvloop: bool = False, + policy: Optional[asyncio.AbstractEventLoopPolicy] = None): + _maybe_set_event_loop_policy(policy, use_uvloop) + self._loop = asyncio.new_event_loop() + self._loop.set_debug(debug) + asyncio.set_event_loop(self._loop) + + def _cancel_all_tasks(self) -> None: + to_cancel = all_tasks(self._loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + self._loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + raise cast(BaseException, task.exception()) + + def close(self) -> None: + try: + self._cancel_all_tasks() + 
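The CapacityLimiter above tracks borrowers in a set and parks waiters as events in an OrderedDict, waking one whenever release() frees a token. A small runnable illustration through the public API; the token count and sleep duration are arbitrary:

    import anyio

    async def worker(limiter: anyio.CapacityLimiter, n: int) -> None:
        async with limiter:  # acquire() on entry, release() on exit
            print(f'worker {n} holds a token')
            await anyio.sleep(0.1)  # at most total_tokens workers sit here at once

    async def main() -> None:
        limiter = anyio.CapacityLimiter(2)
        async with anyio.create_task_group() as tg:
            for n in range(5):
                tg.start_soon(worker, limiter, n)

    anyio.run(main)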
self._loop.run_until_complete(self._loop.shutdown_asyncgens()) + finally: + asyncio.set_event_loop(None) + self._loop.close() + + def call(self, func: Callable[..., Awaitable[T_Retval]], + *args: object, **kwargs: object) -> T_Retval: + def exception_handler(loop: asyncio.AbstractEventLoop, context: Dict[str, Any]) -> None: + exceptions.append(context['exception']) + + exceptions: List[BaseException] = [] + self._loop.set_exception_handler(exception_handler) + try: + retval: T_Retval = self._loop.run_until_complete(func(*args, **kwargs)) + except Exception as exc: + retval = None # type: ignore[assignment] + exceptions.append(exc) + finally: + self._loop.set_exception_handler(None) + + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise ExceptionGroup(exceptions) + + return retval diff --git a/venv/lib/python3.10/site-packages/anyio/_backends/_trio.py b/venv/lib/python3.10/site-packages/anyio/_backends/_trio.py new file mode 100644 index 0000000..440e54f --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,833 @@ +import array +import math +import socket +from concurrent.futures import Future +from contextvars import copy_context +from dataclasses import dataclass +from functools import partial +from io import IOBase +from os import PathLike +from signal import Signals +from types import TracebackType +from typing import ( + Any, Awaitable, Callable, Collection, ContextManager, Coroutine, Deque, Dict, Generic, List, + Mapping, NoReturn, Optional, Sequence, Set, Tuple, Type, TypeVar, Union, cast) + +import sniffio +import trio.from_thread +from outcome import Error, Outcome, Value +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable, T +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, BusyResourceError, ClosedResourceError, EndOfStream) +from .._core._exceptions import ExceptionGroup as BaseExceptionGroup +from .._core._sockets import convert_ipv6_sockaddr +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType + +try: + from trio import lowlevel as trio_lowlevel +except ImportError: + from trio import hazmat as trio_lowlevel # type: ignore[no-redef] + from trio.hazmat import wait_readable, wait_writable +else: + from trio.lowlevel import wait_readable, wait_writable + +try: + from trio.lowlevel import open_process as trio_open_process # type: ignore[attr-defined] +except ImportError: + from trio import open_process as trio_open_process + +T_Retval = TypeVar('T_Retval') +T_SockAddr = TypeVar('T_SockAddr', str, IPSockAddrType) + + +# +# Event loop +# + +run = trio.run +current_token = trio.lowlevel.current_trio_token +RunVar = trio.lowlevel.RunVar + + +# +# Miscellaneous +# + +sleep = trio.sleep + + +# +# Timeouts and cancellation +# + +class CancelScope(BaseCancelScope): + def __new__(cls, original: Optional[trio.CancelScope] = None, + **kwargs: object) -> 'CancelScope': + return object.__new__(cls) + + def __init__(self, original: Optional[trio.CancelScope] = None, **kwargs: Any) -> None: + self.__original = original or trio.CancelScope(**kwargs) + 
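From here on the file is anyio's trio backend: the same abstract interfaces implemented by delegating to native trio objects, starting with this CancelScope wrapper around trio.CancelScope. Selecting the backend is a one-argument change at the entry point (sketch; assumes trio is installed):

    import anyio

    async def main() -> None:
        with anyio.move_on_after(1):  # backed by the CancelScope wrapper on either backend
            await anyio.sleep(10)

    anyio.run(main, backend='trio')  # the default backend is 'asyncio'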
+ def __enter__(self) -> 'CancelScope': + self.__original.__enter__() + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + def cancel(self) -> DeprecatedAwaitable: + self.__original.cancel() + return DeprecatedAwaitable(self.cancel) + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def shield(self) -> bool: + return self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +CancelledError = trio.Cancelled +checkpoint = trio.lowlevel.checkpoint +checkpoint_if_cancelled = trio.lowlevel.checkpoint_if_cancelled +cancel_shielded_checkpoint = trio.lowlevel.cancel_shielded_checkpoint +current_effective_deadline = trio.current_effective_deadline +current_time = trio.current_time + + +# +# Task groups +# + +class ExceptionGroup(BaseExceptionGroup, trio.MultiError): + pass + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery() + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> 'TaskGroup': + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + try: + return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) + except trio.MultiError as exc: + raise ExceptionGroup(exc.exceptions) from None + finally: + self._active = False + + def start_soon(self, func: Callable, *args: object, name: object = None) -> None: + if not self._active: + raise RuntimeError('This task group is not active; no new tasks can be started.') + + self._nursery.start_soon(func, *args, name=name) + + async def start(self, func: Callable[..., Coroutine], + *args: object, name: object = None) -> object: + if not self._active: + raise RuntimeError('This task group is not active; no new tasks can be started.') + + return await self._nursery.start(func, *args, name=name) + +# +# Threads +# + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional[trio.CapacityLimiter] = None) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread('trio'): + return func(*args) + + # TODO: remove explicit context copying when trio 0.20 is the minimum requirement + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + return await run_sync(context.run, wrapper, cancellable=cancellable, limiter=limiter) + + +# TODO: remove this workaround when trio 0.20 is the minimum requirement +def run_async_from_thread(fn: Callable[..., Awaitable[T_Retval]], *args: Any) -> T_Retval: + async def wrapper() -> T_Retval: + retval: T_Retval + + async def inner() -> None: + nonlocal retval + __tracebackhide__ = True + retval = await fn(*args) + + async with trio.open_nursery() as n: + context.run(n.start_soon, inner) + + __tracebackhide__ = True + return retval + + context = copy_context() + 
context.run(sniffio.current_async_library_cvar.set, 'trio') + return trio.from_thread.run(wrapper) + + +def run_sync_from_thread(fn: Callable[..., T_Retval], *args: Any) -> T_Retval: + # TODO: remove explicit context copying when trio 0.20 is the minimum requirement + retval = trio.from_thread.run_sync(copy_context().run, fn, *args) + return cast(T_Retval, retval) + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> 'BlockingPortal': + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._token = trio.lowlevel.current_trio_token() + + def _spawn_task_from_thread(self, func: Callable, args: tuple, kwargs: Dict[str, Any], + name: object, future: Future) -> None: + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, 'trio') + trio.from_thread.run_sync( + context.run, partial(self._task_group.start_soon, name=name), self._call_func, func, + args, kwargs, future, trio_token=self._token) + + +# +# Subprocesses +# + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: Optional[int] = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + _stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: Optional[abc.ByteSendStream] + _stdout: Optional[abc.ByteReceiveStream] + _stderr: Optional[abc.ByteReceiveStream] + + async def aclose(self) -> None: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + await self.wait() + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: Signals) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> Optional[int]: + return self._process.returncode + + @property + def stdin(self) -> Optional[abc.ByteSendStream]: + return self._stdin + + @property + def stdout(self) -> Optional[abc.ByteReceiveStream]: + return self._stdout + + @property + def stderr(self) -> Optional[abc.ByteReceiveStream]: + return self._stderr + + +async def open_process(command: Union[str, Sequence[str]], *, shell: bool, + stdin: int, stdout: int, stderr: int, + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False) -> Process: + process = await trio_open_process(command, stdin=stdin, stdout=stdout, stderr=stderr, + shell=shell, cwd=cwd, env=env, + start_new_session=start_new_session) + stdin_stream = SendStreamWrapper(process.stdin) if 
process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter: RunVar = RunVar( + 'current_default_worker_process_limiter') + + +async def _shutdown_process_pool(workers: Set[Process]) -> None: + process: Process + try: + await sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with CancelScope(shield=True): + for process in workers: + await process.aclose() + + +def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + +# +# Sockets and networking +# + +class _TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock # type: ignore[attr-defined] + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> 'NoReturn': + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError('msglen must be a non-negative integer') + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError('maxfds must be a positive integer') + + fds = array.array("i") + await checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize)) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != 
socket.SCM_RIGHTS: + raise RuntimeError(f'Received unexpected ancillary data; message = {message!r}, ' + f'cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}') + + fds.frombytes(cmsg_data[:len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[Union[int, IOBase]]) -> None: + if not message: + raise ValueError('message must not be empty') + if not fds: + raise ValueError('fds must not be empty') + + filenos: List[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], + [(socket.SOL_SOCKET, socket.SCM_RIGHTS, # type: ignore[list-item] + fdarray)] + ) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard('accepting connections from') + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SocketStream(trio_socket) + + +class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard('accepting connections from') + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + async def receive(self) -> Tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +async def connect_tcp(host: str, port: int, + local_address: Optional[IPSockAddrType] = None) -> SocketStream: + family = socket.AF_INET6 if ':' in host else socket.AF_INET + trio_socket = 
trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + +async def connect_unix(path: str) -> UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + +async def create_udp_socket( + family: socket.AddressFamily, + local_address: Optional[IPSockAddrType], + remote_address: Optional[IPSockAddrType], + reuse_port: bool +) -> Union[UDPSocket, ConnectedUDPSocket]: + trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + +getaddrinfo = trio.socket.getaddrinfo +getnameinfo = trio.socket.getnameinfo + + +async def wait_socket_readable(sock: socket.socket) -> None: + try: + await wait_readable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError('reading from') from None + + +async def wait_socket_writable(sock: socket.socket) -> None: + try: + await wait_writable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError('writing to') from None + + +# +# Synchronization +# + +class Event(BaseEvent): + def __new__(cls) -> 'Event': + return object.__new__(cls) + + def __init__(self) -> None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + orig_statistics = self.__original.statistics() + return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) + + def set(self) -> DeprecatedAwaitable: + self.__original.set() + return DeprecatedAwaitable(self.set) + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__(cls, *args: object, **kwargs: object) -> "CapacityLimiter": + return object.__new__(cls) + + def __init__(self, *args: Any, original: Optional[trio.CapacityLimiter] = None) -> None: + self.__original = original or trio.CapacityLimiter(*args) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> DeprecatedAwaitable: + self.__original.acquire_nowait() + return DeprecatedAwaitable(self.acquire_nowait) + + def 
acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + self.__original.acquire_on_behalf_of_nowait(borrower) + return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + orig = self.__original.statistics() + return CapacityLimiterStatistics( + borrowed_tokens=orig.borrowed_tokens, total_tokens=orig.total_tokens, + borrowers=orig.borrowers, tasks_waiting=orig.tasks_waiting) + + +_capacity_limiter_wrapper: RunVar = RunVar('_capacity_limiter_wrapper') + + +def current_default_thread_limiter() -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter(original=trio.to_thread.current_default_thread_limiter()) + _capacity_limiter_wrapper.set(limiter) + return limiter + + +# +# Signal handling +# + +class _SignalReceiver(DeprecatedAsyncContextManager[T]): + def __init__(self, cm: ContextManager[T]): + self._cm = cm + + def __enter__(self) -> T: + return self._cm.__enter__() + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + +def open_signal_receiver(*signals: Signals) -> _SignalReceiver: + cm = trio.open_signal_receiver(*signals) + return _SignalReceiver(cm) + +# +# Testing and debugging +# + + +def get_current_task() -> TaskInfo: + task = trio_lowlevel.current_task() + + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + return TaskInfo(id(task), parent_id, task.name, task.coro) + + +def get_running_tasks() -> List[TaskInfo]: + root_task = trio_lowlevel.current_root_task() + task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: List[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append( + TaskInfo(id(task), id(nursery.parent_task), task.name, task.coro)) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + +def wait_all_tasks_blocked() -> Awaitable[None]: + import trio.testing + return trio.testing.wait_all_tasks_blocked() + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: Any) -> None: + from collections import deque + from queue import Queue + + self._call_queue: "Queue[Callable[..., object]]" = Queue() + self._result_queue: Deque[Outcome] = deque() + self._stop_event: Optional[trio.Event] = None + self._nursery: Optional[trio.Nursery] = None + self._options = options + + async def _trio_main(self) -> None: + self._stop_event = trio.Event() + async with trio.open_nursery() as self._nursery: + await self._stop_event.wait() + + async def _call_func(self, func: Callable[..., Awaitable[object]], + args: tuple, kwargs: dict) -> None: + try: + retval = await func(*args, **kwargs) + except BaseException as exc: + self._result_queue.append(Error(exc)) + else: + self._result_queue.append(Value(retval)) + + def _main_task_finished(self, outcome: 
object) -> None: + self._nursery = None + + def close(self) -> None: + if self._stop_event: + self._stop_event.set() + while self._nursery is not None: + self._call_queue.get()() + + def call(self, func: Callable[..., Awaitable[T_Retval]], + *args: object, **kwargs: object) -> T_Retval: + if self._nursery is None: + trio.lowlevel.start_guest_run( + self._trio_main, run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, **self._options) + while self._nursery is None: + self._call_queue.get()() + + self._nursery.start_soon(self._call_func, func, args, kwargs) + while not self._result_queue: + self._call_queue.get()() + + outcome = self._result_queue.pop() + return outcome.unwrap() diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__init__.py b/venv/lib/python3.10/site-packages/anyio/_core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..131b3a5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_compat.cpython-310.pyc new file mode 100644 index 0000000..483cc86 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_compat.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_eventloop.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_eventloop.cpython-310.pyc new file mode 100644 index 0000000..095bd5d Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_eventloop.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_exceptions.cpython-310.pyc new file mode 100644 index 0000000..75c352b Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_fileio.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_fileio.cpython-310.pyc new file mode 100644 index 0000000..c6a5651 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_fileio.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_resources.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_resources.cpython-310.pyc new file mode 100644 index 0000000..772585b Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_resources.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_signals.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_signals.cpython-310.pyc new file mode 100644 index 0000000..4cd0d53 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_signals.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_sockets.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_sockets.cpython-310.pyc new file mode 100644 index 0000000..7d41ae9 Binary 
files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_sockets.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_streams.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_streams.cpython-310.pyc new file mode 100644 index 0000000..3af68f3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_streams.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-310.pyc new file mode 100644 index 0000000..87f4cd1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_synchronization.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_synchronization.cpython-310.pyc new file mode 100644 index 0000000..3605bce Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_synchronization.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_tasks.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_tasks.cpython-310.pyc new file mode 100644 index 0000000..3bb6601 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_tasks.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_testing.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_testing.cpython-310.pyc new file mode 100644 index 0000000..3675926 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_testing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_typedattr.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_typedattr.cpython-310.pyc new file mode 100644 index 0000000..9955eee Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_typedattr.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_compat.py b/venv/lib/python3.10/site-packages/anyio/_core/_compat.py new file mode 100644 index 0000000..8a0cfd0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_compat.py @@ -0,0 +1,175 @@ +from abc import ABCMeta, abstractmethod +from contextlib import AbstractContextManager +from types import TracebackType +from typing import ( + TYPE_CHECKING, Any, AsyncContextManager, Callable, ContextManager, Generator, Generic, + Iterable, List, Optional, Tuple, Type, TypeVar, Union, overload) +from warnings import warn + +if TYPE_CHECKING: + from ._testing import TaskInfo +else: + TaskInfo = object + +T = TypeVar('T') +AnyDeprecatedAwaitable = Union['DeprecatedAwaitable', 'DeprecatedAwaitableFloat', + 'DeprecatedAwaitableList[T]', TaskInfo] + + +@overload +async def maybe_async(__obj: TaskInfo) -> TaskInfo: + ... + + +@overload +async def maybe_async(__obj: 'DeprecatedAwaitableFloat') -> float: + ... + + +@overload +async def maybe_async(__obj: 'DeprecatedAwaitableList[T]') -> List[T]: + ... + + +@overload +async def maybe_async(__obj: 'DeprecatedAwaitable') -> None: + ... + + +async def maybe_async(__obj: 'AnyDeprecatedAwaitable[T]') -> Union[TaskInfo, float, List[T], None]: + """ + Await on the given object if necessary. 
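maybe_async(), continued below, exists so code written for AnyIO 2.x (where these calls were awaitable) keeps working on 3.x. A sketch of the transitional pattern; Event.set() is used as the example because in 3.x it is a regular method returning a DeprecatedAwaitable:

    import anyio

    async def main() -> None:
        event = anyio.Event()
        # 2.x style was `await event.set()`; the spelling that works on both is:
        await anyio.maybe_async(event.set())
        assert event.is_set()

    anyio.run(main)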
+ + This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and + methods were converted from coroutine functions into regular functions. + + Do **not** try to use this for any other purpose! + + :return: the result of awaiting on the object if coroutine, or the object itself otherwise + + .. versionadded:: 2.2 + + """ + return __obj._unwrap() + + +class _ContextManagerWrapper: + def __init__(self, cm: ContextManager[T]): + self._cm = cm + + async def __aenter__(self) -> T: + return self._cm.__enter__() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + +def maybe_async_cm(cm: Union[ContextManager[T], AsyncContextManager[T]]) -> AsyncContextManager[T]: + """ + Wrap a regular context manager as an async one if necessary. + + This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and + methods were changed to return regular context managers instead of async ones. + + :param cm: a regular or async context manager + :return: an async context manager + + .. versionadded:: 2.2 + + """ + if not isinstance(cm, AbstractContextManager): + raise TypeError('Given object is not an context manager') + + return _ContextManagerWrapper(cm) + + +def _warn_deprecation(awaitable: 'AnyDeprecatedAwaitable[Any]', stacklevel: int = 1) -> None: + warn(f'Awaiting on {awaitable._name}() is deprecated. Use "await ' + f'anyio.maybe_async({awaitable._name}(...)) if you have to support both AnyIO 2.x ' + f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.', + DeprecationWarning, stacklevel=stacklevel + 1) + + +class DeprecatedAwaitable: + def __init__(self, func: Callable[..., 'DeprecatedAwaitable']): + self._name = f'{func.__module__}.{func.__qualname__}' + + def __await__(self) -> Generator[None, None, None]: + _warn_deprecation(self) + if False: + yield + + def __reduce__(self) -> Tuple[Type[None], Tuple[()]]: + return type(None), () + + def _unwrap(self) -> None: + return None + + +class DeprecatedAwaitableFloat(float): + def __new__( + cls, x: float, func: Callable[..., 'DeprecatedAwaitableFloat'] + ) -> 'DeprecatedAwaitableFloat': + return super().__new__(cls, x) + + def __init__(self, x: float, func: Callable[..., 'DeprecatedAwaitableFloat']): + self._name = f'{func.__module__}.{func.__qualname__}' + + def __await__(self) -> Generator[None, None, float]: + _warn_deprecation(self) + if False: + yield + + return float(self) + + def __reduce__(self) -> Tuple[Type[float], Tuple[float]]: + return float, (float(self),) + + def _unwrap(self) -> float: + return float(self) + + +class DeprecatedAwaitableList(List[T]): + def __init__(self, iterable: Iterable[T] = (), *, + func: Callable[..., 'DeprecatedAwaitableList[T]']): + super().__init__(iterable) + self._name = f'{func.__module__}.{func.__qualname__}' + + def __await__(self) -> Generator[None, None, List[T]]: + _warn_deprecation(self) + if False: + yield + + return list(self) + + def __reduce__(self) -> Tuple[Type[List[T]], Tuple[List[T]]]: + return list, (list(self),) + + def _unwrap(self) -> List[T]: + return list(self) + + +class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta): + @abstractmethod + def __enter__(self) -> T: + pass + + @abstractmethod + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> 
Optional[bool]: + pass + + async def __aenter__(self) -> T: + warn(f'Using {self.__class__.__name__} as an async context manager has been deprecated. ' + f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to ' + f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if ' + f'you are completely migrating to AnyIO 3+.', DeprecationWarning) + return self.__enter__() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return self.__exit__(exc_type, exc_val, exc_tb) diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_eventloop.py b/venv/lib/python3.10/site-packages/anyio/_core/_eventloop.py new file mode 100644 index 0000000..9de0a84 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_eventloop.py @@ -0,0 +1,140 @@ +import math +import sys +import threading +from contextlib import contextmanager +from importlib import import_module +from typing import Any, Callable, Coroutine, Dict, Generator, Optional, Tuple, Type, TypeVar + +import sniffio + +# This must be updated when new backends are introduced +from ._compat import DeprecatedAwaitableFloat + +BACKENDS = 'asyncio', 'trio' + +T_Retval = TypeVar('T_Retval') +threadlocals = threading.local() + + +def run(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object, + backend: str = 'asyncio', backend_options: Optional[Dict[str, Any]] = None) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. + + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param backend: name of the asynchronous event loop implementation – currently either + ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` implementation with + (documented :ref:`here `) + :return: the return value of the coroutine function + :raises RuntimeError: if an asynchronous event loop is already running in this thread + :raises LookupError: if the named backend is not found + + """ + try: + asynclib_name = sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + pass + else: + raise RuntimeError(f'Already running {asynclib_name} in this thread') + + try: + asynclib = import_module(f'..._backends._{backend}', package=__name__) + except ImportError as exc: + raise LookupError(f'No such backend: {backend}') from exc + + token = None + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the async library + token = sniffio.current_async_library_cvar.set(backend) + + try: + backend_options = backend_options or {} + return asynclib.run(func, *args, **backend_options) + finally: + if token: + sniffio.current_async_library_cvar.reset(token) + + +async def sleep(delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + + """ + return await get_asynclib().sleep(delay) + + +async def sleep_forever() -> None: + """ + Pause the current task until it's cancelled. + + This is a shortcut for ``sleep(math.inf)``. + + .. versionadded:: 3.1 + + """ + await sleep(math.inf) + + +async def sleep_until(deadline: float) -> None: + """ + Pause the current task until the given time. 
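A usage sketch for sleep_until(), whose body below computes the delay from the loop's monotonic clock and clamps past deadlines to zero:

    import anyio

    async def main() -> None:
        deadline = anyio.current_time() + 0.5
        await anyio.sleep_until(deadline)  # a deadline already in the past returns immediately

    anyio.run(main)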
+ + :param deadline: the absolute time to wake up at (according to the internal monotonic clock of + the event loop) + + .. versionadded:: 3.1 + + """ + now = current_time() + await sleep(max(deadline - now, 0)) + + +def current_time() -> DeprecatedAwaitableFloat: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + + """ + return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time) + + +def get_all_backends() -> Tuple[str, ...]: + """Return a tuple of the names of all built-in backends.""" + return BACKENDS + + +def get_cancelled_exc_class() -> Type[BaseException]: + """Return the current async library's cancellation exception class.""" + return get_asynclib().CancelledError + + +# +# Private API +# + +@contextmanager +def claim_worker_thread(backend: str) -> Generator[Any, None, None]: + module = sys.modules['anyio._backends._' + backend] + threadlocals.current_async_module = module + try: + yield + finally: + del threadlocals.current_async_module + + +def get_asynclib(asynclib_name: Optional[str] = None) -> Any: + if asynclib_name is None: + asynclib_name = sniffio.current_async_library() + + modulename = 'anyio._backends._' + asynclib_name + try: + return sys.modules[modulename] + except KeyError: + return import_module(modulename) diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_exceptions.py b/venv/lib/python3.10/site-packages/anyio/_core/_exceptions.py new file mode 100644 index 0000000..06db05d --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_exceptions.py @@ -0,0 +1,85 @@ +from traceback import format_exception +from typing import List + + +class BrokenResourceError(Exception): + """ + Raised when trying to use a resource that has been rendered unusable due to external causes + (e.g. a send stream whose peer has disconnected). + """ + + +class BrokenWorkerProcess(Exception): + """ + Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise + misbehaves. + """ + + +class BusyResourceError(Exception): + """Raised when two tasks are trying to read from or write to the same resource concurrently.""" + + def __init__(self, action: str): + super().__init__(f'Another task is already {action} this resource') + + +class ClosedResourceError(Exception): + """Raised when trying to use a resource that has been closed.""" + + +class DelimiterNotFound(Exception): + """ + Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + maximum number of bytes has been read without the delimiter being found. + """ + + def __init__(self, max_bytes: int) -> None: + super().__init__(f'The delimiter was not found among the first {max_bytes} bytes') + + +class EndOfStream(Exception): + """Raised when trying to read from a stream that has been closed from the other end.""" + + +class ExceptionGroup(BaseException): + """ + Raised when multiple exceptions have been raised in a task group. 
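+
+    An illustrative sketch of handling it (``task_a`` and ``task_b`` are placeholder
+    coroutine functions, not part of this module)::
+
+        try:
+            async with anyio.create_task_group() as tg:
+                tg.start_soon(task_a)
+                tg.start_soon(task_b)
+        except anyio.ExceptionGroup as excgrp:
+            for exc in excgrp.exceptions:
+                print(type(exc).__name__, exc)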
+ + :var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together + """ + + SEPARATOR = '----------------------------\n' + + exceptions: List[BaseException] + + def __str__(self) -> str: + tracebacks = [''.join(format_exception(type(exc), exc, exc.__traceback__)) + for exc in self.exceptions] + return f'{len(self.exceptions)} exceptions were raised in the task group:\n' \ + f'{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}' + + def __repr__(self) -> str: + exception_reprs = ', '.join(repr(exc) for exc in self.exceptions) + return f'<{self.__class__.__name__}: {exception_reprs}>' + + +class IncompleteRead(Exception): + """ + Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. + """ + + def __init__(self) -> None: + super().__init__('The stream was closed before the read operation could be completed') + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not + found and no default value has been given. + """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_fileio.py b/venv/lib/python3.10/site-packages/anyio/_core/_fileio.py new file mode 100644 index 0000000..2aee21a --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,529 @@ +import os +import pathlib +import sys +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, TYPE_CHECKING, Any, AnyStr, AsyncIterator, Callable, Generic, Iterable, Iterator, List, + Optional, Sequence, Tuple, Union, cast, overload) + +from .. import to_thread +from ..abc import AsyncResource + +if sys.version_info >= (3, 8): + from typing import Final +else: + from typing_extensions import Final + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the following + blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the underlying file + at the end of the context block. + + This class also supports asynchronous iteration:: + + async with await open_file(...) 
as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: 'AsyncFile[bytes]', size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> List[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: 'AsyncFile[bytes]', b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: 'AsyncFile[bytes]', b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: 'AsyncFile[bytes]', b: ReadableBuffer) -> int: ... + + @overload + async def write(self: 'AsyncFile[str]', b: str) -> int: ... + + async def write(self, b: Union[ReadableBuffer, str]) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines(self: 'AsyncFile[bytes]', lines: Iterable[ReadableBuffer]) -> None: ... + + @overload + async def writelines(self: 'AsyncFile[str]', lines: Iterable[str]) -> None: ... + + async def writelines(self, lines: Union[Iterable[ReadableBuffer], Iterable[str]]) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: Optional[int] = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: Optional[int] = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file(file: Union[str, 'PathLike[str]', int], mode: OpenBinaryMode, + buffering: int = ..., encoding: Optional[str] = ..., + errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ..., + opener: Optional[Callable[[str, int], int]] = ...) -> AsyncFile[bytes]: + ... + + +@overload +async def open_file(file: Union[str, 'PathLike[str]', int], mode: OpenTextMode = ..., + buffering: int = ..., encoding: Optional[str] = ..., + errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ..., + opener: Optional[Callable[[str, int], int]] = ...) -> AsyncFile[str]: + ... + + +async def open_file(file: Union[str, 'PathLike[str]', int], mode: str = 'r', buffering: int = -1, + encoding: Optional[str] = None, errors: Optional[str] = None, + newline: Optional[str] = None, closefd: bool = True, + opener: Optional[Callable[[str, int], int]] = None) -> AsyncFile[Any]: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. 
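+
+    A minimal sketch of the intended usage (the path here is purely hypothetical)::
+
+        async with await anyio.open_file('/tmp/example.txt') as f:
+            contents = await f.read()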
+ + :return: an asynchronous file object + + """ + fp = await to_thread.run_sync(open, file, mode, buffering, encoding, errors, newline, + closefd, opener) + return AsyncFile(fp) + + +def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: + """ + Wrap an existing file as an asynchronous file. + + :param file: an existing file-like object + :return: an asynchronous file object + + """ + return AsyncFile(file) + + +@dataclass(eq=False) +class _PathIterator(AsyncIterator['Path']): + iterator: Iterator['PathLike[str]'] + + async def __anext__(self) -> 'Path': + nextval = await to_thread.run_sync(next, self.iterator, None, cancellable=True) + if nextval is None: + raise StopAsyncIteration from None + + return Path(cast('PathLike[str]', nextval)) + + +class Path: + """ + An asynchronous version of :class:`pathlib.Path`. + + This class cannot be substituted for :class:`pathlib.Path` or :class:`pathlib.PurePath`, but + it is compatible with the :class:`os.PathLike` interface. + + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for the + deprecated :meth:`~pathlib.Path.link_to` method. + + Any methods that do disk I/O need to be awaited on. These methods are: + + * :meth:`~pathlib.Path.absolute` + * :meth:`~pathlib.Path.chmod` + * :meth:`~pathlib.Path.cwd` + * :meth:`~pathlib.Path.exists` + * :meth:`~pathlib.Path.expanduser` + * :meth:`~pathlib.Path.group` + * :meth:`~pathlib.Path.hardlink_to` + * :meth:`~pathlib.Path.home` + * :meth:`~pathlib.Path.is_block_device` + * :meth:`~pathlib.Path.is_char_device` + * :meth:`~pathlib.Path.is_dir` + * :meth:`~pathlib.Path.is_fifo` + * :meth:`~pathlib.Path.is_file` + * :meth:`~pathlib.Path.is_mount` + * :meth:`~pathlib.Path.lchmod` + * :meth:`~pathlib.Path.lstat` + * :meth:`~pathlib.Path.mkdir` + * :meth:`~pathlib.Path.open` + * :meth:`~pathlib.Path.owner` + * :meth:`~pathlib.Path.read_bytes` + * :meth:`~pathlib.Path.read_text` + * :meth:`~pathlib.Path.readlink` + * :meth:`~pathlib.Path.rename` + * :meth:`~pathlib.Path.replace` + * :meth:`~pathlib.Path.rmdir` + * :meth:`~pathlib.Path.samefile` + * :meth:`~pathlib.Path.stat` + * :meth:`~pathlib.Path.touch` + * :meth:`~pathlib.Path.unlink` + * :meth:`~pathlib.Path.write_bytes` + * :meth:`~pathlib.Path.write_text` + + Additionally, the following methods return an async iterator yielding :class:`~.Path` objects: + + * :meth:`~pathlib.Path.glob` + * :meth:`~pathlib.Path.iterdir` + * :meth:`~pathlib.Path.rglob` + """ + + __slots__ = '_path', '__weakref__' + + __weakref__: Any + + def __init__(self, *args: Union[str, 'PathLike[str]']) -> None: + self._path: Final[pathlib.Path] = pathlib.Path(*args) + + def __fspath__(self) -> str: + return self._path.__fspath__() + + def __str__(self) -> str: + return self._path.__str__() + + def __repr__(self) -> str: + return f'{self.__class__.__name__}({self.as_posix()!r})' + + def __bytes__(self) -> bytes: + return self._path.__bytes__() + + def __hash__(self) -> int: + return self._path.__hash__() + + def __eq__(self, other: object) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__eq__(target) + + def __lt__(self, other: 'Path') -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__lt__(target) + + def __le__(self, other: 'Path') -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__le__(target) + + def __gt__(self, other: 'Path') -> bool: + target = other._path if isinstance(other, Path) else other + return 
self._path.__gt__(target) + + def __ge__(self, other: 'Path') -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: Any) -> 'Path': + return Path(self._path / other) + + def __rtruediv__(self, other: Any) -> 'Path': + return Path(other) / self + + @property + def parts(self) -> Tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence['Path']: + return tuple(Path(p) for p in self._path.parents) + + @property + def parent(self) -> 'Path': + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return self._path.suffix + + @property + def suffixes(self) -> List[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> 'Path': + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + def is_relative_to(self, *other: Union[str, 'PathLike[str]']) -> bool: + try: + self.relative_to(*other) + return True + except ValueError: + return False + + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + func = partial(os.chmod, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> 'Path': + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, cancellable=True) + + async def expanduser(self) -> 'Path': + return Path(await to_thread.run_sync(self._path.expanduser, cancellable=True)) + + def glob(self, pattern: str) -> AsyncIterator['Path']: + gen = self._path.glob(pattern) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, cancellable=True) + + async def hardlink_to(self, target: Union[str, pathlib.Path, 'Path']) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> 'Path': + home_path = await to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> bool: + return await to_thread.run_sync(self._path.is_block_device, cancellable=True) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync(self._path.is_char_device, cancellable=True) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, cancellable=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, cancellable=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, cancellable=True) + + async def is_mount(self) -> bool: + return await to_thread.run_sync(os.path.ismount, self._path, cancellable=True) + + def is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await 
to_thread.run_sync(self._path.is_socket, cancellable=True) + + async def is_symlink(self) -> bool: + return await to_thread.run_sync(self._path.is_symlink, cancellable=True) + + def iterdir(self) -> AsyncIterator['Path']: + gen = self._path.iterdir() + return _PathIterator(gen) + + def joinpath(self, *args: Union[str, 'PathLike[str]']) -> 'Path': + return Path(self._path.joinpath(*args)) + + async def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, cancellable=True) + + async def mkdir(self, mode: int = 0o777, parents: bool = False, + exist_ok: bool = False) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open(self, mode: OpenBinaryMode, buffering: int = ..., encoding: Optional[str] = ..., + errors: Optional[str] = ..., newline: Optional[str] = ...) -> AsyncFile[bytes]: + ... + + @overload + async def open(self, mode: OpenTextMode = ..., buffering: int = ..., + encoding: Optional[str] = ..., errors: Optional[str] = ..., + newline: Optional[str] = ...) -> AsyncFile[str]: + ... + + async def open(self, mode: str = 'r', buffering: int = -1, encoding: Optional[str] = None, + errors: Optional[str] = None, newline: Optional[str] = None) -> AsyncFile[Any]: + fp = await to_thread.run_sync(self._path.open, mode, buffering, encoding, errors, newline) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, cancellable=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text(self, encoding: Optional[str] = None, errors: Optional[str] = None) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + def relative_to(self, *other: Union[str, 'PathLike[str]']) -> 'Path': + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> 'Path': + target = await to_thread.run_sync(os.readlink, self._path) + return Path(cast(str, target)) + + async def rename(self, target: Union[str, pathlib.PurePath, 'Path']) -> 'Path': + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: Union[str, pathlib.PurePath, 'Path']) -> 'Path': + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> 'Path': + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, cancellable=True)) + + def rglob(self, pattern: str) -> AsyncIterator['Path']: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile(self, other_path: Union[str, bytes, int, pathlib.Path, 'Path']) -> bool: + if isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync(self._path.samefile, other_path, cancellable=True) + + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, cancellable=True) + + async def symlink_to(self, target: Union[str, pathlib.Path, 'Path'], + target_is_directory: bool = False) -> None: + if isinstance(target, Path): + target = target._path + + 
await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + def with_name(self, name: str) -> 'Path': + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> 'Path': + return Path(self._path.with_name(stem + self._path.suffix)) + + def with_suffix(self, suffix: str) -> 'Path': + return Path(self._path.with_suffix(suffix)) + + async def write_bytes(self, data: bytes) -> int: + return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text(self, data: str, encoding: Optional[str] = None, + errors: Optional[str] = None, newline: Optional[str] = None) -> int: + # Path.write_text() does not support the "newline" parameter before Python 3.10 + def sync_write_text() -> int: + with self._path.open('w', encoding=encoding, errors=errors, newline=newline) as fp: + return fp.write(data) + + return await to_thread.run_sync(sync_write_text) + + +PathLike.register(Path) diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_resources.py b/venv/lib/python3.10/site-packages/anyio/_core/_resources.py new file mode 100644 index 0000000..b9414f7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_resources.py @@ -0,0 +1,16 @@ +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. + + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_signals.py b/venv/lib/python3.10/site-packages/anyio/_core/_signals.py new file mode 100644 index 0000000..f761982 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_signals.py @@ -0,0 +1,22 @@ +from typing import AsyncIterator + +from ._compat import DeprecatedAsyncContextManager +from ._eventloop import get_asynclib + + +def open_signal_receiver(*signals: int) -> DeprecatedAsyncContextManager[AsyncIterator[int]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields signal + numbers + + .. warning:: Windows does not support signals natively so it is best to avoid relying on this + in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for the given + signals, as set via :meth:`~asyncio.loop.add_signal_handler`. + + """ + return get_asynclib().open_signal_receiver(*signals) diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_sockets.py b/venv/lib/python3.10/site-packages/anyio/_core/_sockets.py new file mode 100644 index 0000000..c086edc --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,506 @@ +import socket +import ssl +import sys +from ipaddress import IPv6Address, ip_address +from os import PathLike, chmod +from pathlib import Path +from socket import AddressFamily, SocketKind +from typing import Awaitable, List, Optional, Tuple, Union, cast, overload + +from .. 
import to_thread
+from ..abc import (
+    ConnectedUDPSocket, IPAddressType, IPSockAddrType, SocketListener, SocketStream, UDPSocket,
+    UNIXSocketStream)
+from ..streams.stapled import MultiListener
+from ..streams.tls import TLSStream
+from ._eventloop import get_asynclib
+from ._resources import aclose_forcefully
+from ._synchronization import Event
+from ._tasks import create_task_group, move_on_after
+
+if sys.version_info >= (3, 8):
+    from typing import Literal
+else:
+    from typing_extensions import Literal
+
+IPPROTO_IPV6 = getattr(socket, 'IPPROTO_IPV6', 41)  # https://bugs.python.org/issue29515
+
+GetAddrInfoReturnType = List[Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]]]
+AnyIPAddressFamily = Literal[AddressFamily.AF_UNSPEC, AddressFamily.AF_INET,
+                             AddressFamily.AF_INET6]
+IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6]
+
+
+# tls_hostname given
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ...,
+    ssl_context: Optional[ssl.SSLContext] = ..., tls_standard_compatible: bool = ...,
+    tls_hostname: str, happy_eyeballs_delay: float = ...
+) -> TLSStream:
+    ...
+
+
+# ssl_context given
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ...,
+    ssl_context: ssl.SSLContext, tls_standard_compatible: bool = ...,
+    tls_hostname: Optional[str] = ..., happy_eyeballs_delay: float = ...
+) -> TLSStream:
+    ...
+
+
+# tls=True
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ...,
+    tls: Literal[True], ssl_context: Optional[ssl.SSLContext] = ...,
+    tls_standard_compatible: bool = ..., tls_hostname: Optional[str] = ...,
+    happy_eyeballs_delay: float = ...
+) -> TLSStream:
+    ...
+
+
+# tls=False
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ...,
+    tls: Literal[False], ssl_context: Optional[ssl.SSLContext] = ...,
+    tls_standard_compatible: bool = ..., tls_hostname: Optional[str] = ...,
+    happy_eyeballs_delay: float = ...
+) -> SocketStream:
+    ...
+
+
+# No TLS arguments
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ...,
+    happy_eyeballs_delay: float = ...
+) -> SocketStream:
+    ...
+
+
+async def connect_tcp(
+    remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = None,
+    tls: bool = False, ssl_context: Optional[ssl.SSLContext] = None,
+    tls_standard_compatible: bool = True, tls_hostname: Optional[str] = None,
+    happy_eyeballs_delay: float = 0.25
+) -> Union[SocketStream, TLSStream]:
+    """
+    Connect to a host using the TCP protocol.
+
+    This function implements the stateless version of the Happy Eyeballs algorithm (RFC 6555).
+    If ``remote_host`` is a host name that resolves to multiple IP addresses, each one is tried
+    until one connection attempt succeeds. If the first attempt does not connect within 250
+    milliseconds, a second attempt is started using the next address in the list, and so on.
+    On IPv6 enabled systems, an IPv6 address (if available) is tried first.
+
+    When the connection has been established, a TLS handshake will be done if either
+    ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.
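+
+    A hedged sketch of a plain TCP connection (host, port and payload are placeholders)::
+
+        async with await anyio.connect_tcp('example.org', 80) as stream:
+            await stream.send(b'HEAD / HTTP/1.0\r\nHost: example.org\r\n\r\n')
+            response = await stream.receive()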
+ + :param remote_host: the IP address or host name to connect to + :param remote_port: port on the target host to connect to + :param local_host: the interface address or name to bind the socket to before connecting + :param tls: ``True`` to do a TLS handshake with the connected stream and return a + :class:`~anyio.streams.tls.TLSStream` instead + :param ssl_context: the SSL context object to use (if omitted, a default context is created) + :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake before closing + the stream and requires that the server does this as well. Otherwise, + :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. + Some protocols, such as HTTP, require this option to be ``False``. + See :meth:`~ssl.SSLContext.wrap_socket` for details. + :param tls_hostname: host name to check the server certificate against (defaults to the value + of ``remote_host``) + :param happy_eyeballs_delay: delay (in seconds) before starting the next connection attempt + :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream + :raises OSError: if the connection attempt fails + + """ + # Placed here due to https://github.com/python/mypy/issues/7057 + connected_stream: Optional[SocketStream] = None + + async def try_connect(remote_host: str, event: Event) -> None: + nonlocal connected_stream + try: + stream = await asynclib.connect_tcp(remote_host, remote_port, local_address) + except OSError as exc: + oserrors.append(exc) + return + else: + if connected_stream is None: + connected_stream = stream + tg.cancel_scope.cancel() + else: + await stream.aclose() + finally: + event.set() + + asynclib = get_asynclib() + local_address: Optional[IPSockAddrType] = None + family = socket.AF_UNSPEC + if local_host: + gai_res = await getaddrinfo(str(local_host), None) + family, *_, local_address = gai_res[0] + + target_host = str(remote_host) + try: + addr_obj = ip_address(remote_host) + except ValueError: + # getaddrinfo() will raise an exception if name resolution fails + gai_res = await getaddrinfo(target_host, remote_port, family=family, + type=socket.SOCK_STREAM) + + # Organize the list so that the first address is an IPv6 address (if available) and the + # second one is an IPv4 addresses. The rest can be in whatever order. 
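+        # Interleaving one IPv6 and one IPv4 result at the head of the list lets the
+        # staggered connection attempts below try both address families early.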
+ v6_found = v4_found = False + target_addrs: List[Tuple[socket.AddressFamily, str]] = [] + for af, *rest, sa in gai_res: + if af == socket.AF_INET6 and not v6_found: + v6_found = True + target_addrs.insert(0, (af, sa[0])) + elif af == socket.AF_INET and not v4_found and v6_found: + v4_found = True + target_addrs.insert(1, (af, sa[0])) + else: + target_addrs.append((af, sa[0])) + else: + if isinstance(addr_obj, IPv6Address): + target_addrs = [(socket.AF_INET6, addr_obj.compressed)] + else: + target_addrs = [(socket.AF_INET, addr_obj.compressed)] + + oserrors: List[OSError] = [] + async with create_task_group() as tg: + for i, (af, addr) in enumerate(target_addrs): + event = Event() + tg.start_soon(try_connect, addr, event) + with move_on_after(happy_eyeballs_delay): + await event.wait() + + if connected_stream is None: + cause = oserrors[0] if len(oserrors) == 1 else asynclib.ExceptionGroup(oserrors) + raise OSError('All connection attempts failed') from cause + + if tls or tls_hostname or ssl_context: + try: + return await TLSStream.wrap(connected_stream, server_side=False, + hostname=tls_hostname or str(remote_host), + ssl_context=ssl_context, + standard_compatible=tls_standard_compatible) + except BaseException: + await aclose_forcefully(connected_stream) + raise + + return connected_stream + + +async def connect_unix(path: Union[str, 'PathLike[str]']) -> UNIXSocketStream: + """ + Connect to the given UNIX socket. + + Not available on Windows. + + :param path: path to the socket + :return: a socket stream object + + """ + path = str(Path(path)) + return await get_asynclib().connect_unix(path) + + +async def create_tcp_listener( + *, local_host: Optional[IPAddressType] = None, local_port: int = 0, + family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, backlog: int = 65536, + reuse_port: bool = False +) -> MultiListener[SocketStream]: + """ + Create a TCP socket listener. + + :param local_port: port number to listen on + :param local_host: IP address of the interface to listen on. If omitted, listen on all IPv4 + and IPv6 interfaces. To listen on all interfaces on a specific address family, use + ``0.0.0.0`` for IPv4 or ``::`` for IPv6. + :param family: address family (used if ``interface`` was omitted) + :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or + 65536) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port + (not supported on Windows) + :return: a list of listener objects + + """ + asynclib = get_asynclib() + backlog = min(backlog, 65536) + local_host = str(local_host) if local_host is not None else None + gai_res = await getaddrinfo(local_host, local_port, family=family, # type: ignore[arg-type] + type=socket.SOCK_STREAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG) + listeners: List[SocketListener] = [] + try: + # The set() is here to work around a glibc bug: + # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 + for fam, *_, sockaddr in sorted(set(gai_res)): + raw_socket = socket.socket(fam) + raw_socket.setblocking(False) + + # For Windows, enable exclusive address use. For others, enable address reuse. 
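+            # SO_EXCLUSIVEADDRUSE prevents other processes from hijacking the port on
+            # Windows; SO_REUSEADDR lets the listener rebind while the previous socket
+            # is still in TIME_WAIT on POSIX.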
+ if sys.platform == 'win32': + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + else: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + if reuse_port: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + # If only IPv6 was requested, disable dual stack operation + if fam == socket.AF_INET6: + raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + + raw_socket.bind(sockaddr) + raw_socket.listen(backlog) + listener = asynclib.TCPSocketListener(raw_socket) + listeners.append(listener) + except BaseException: + for listener in listeners: + await listener.aclose() + + raise + + return MultiListener(listeners) + + +async def create_unix_listener( + path: Union[str, 'PathLike[str]'], *, mode: Optional[int] = None, + backlog: int = 65536) -> SocketListener: + """ + Create a UNIX socket listener. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or + 65536) + :return: a listener object + + .. versionchanged:: 3.0 + If a socket already exists on the file system in the given path, it will be removed first. + + """ + path_str = str(path) + path = Path(path) + if path.is_socket(): + path.unlink() + + backlog = min(backlog, 65536) + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + try: + await to_thread.run_sync(raw_socket.bind, path_str, cancellable=True) + if mode is not None: + await to_thread.run_sync(chmod, path_str, mode, cancellable=True) + + raw_socket.listen(backlog) + return get_asynclib().UNIXSocketListener(raw_socket) + except BaseException: + raw_socket.close() + raise + + +async def create_udp_socket( + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, *, + local_host: Optional[IPAddressType] = None, local_port: int = 0, reuse_port: bool = False +) -> UDPSocket: + """ + Create a UDP socket. + + If ``port`` has been given, the socket will be bound to this port on the local machine, + making this socket suitable for providing UDP based services. + + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from + ``local_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port + (not supported on Windows) + :return: a UDP socket + + """ + if family is AddressFamily.AF_UNSPEC and not local_host: + raise ValueError('Either "family" or "local_host" must be given') + + if local_host: + gai_res = await getaddrinfo(str(local_host), local_port, family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + elif family is AddressFamily.AF_INET6: + local_address = ('::', 0) + else: + local_address = ('0.0.0.0', 0) + + return await get_asynclib().create_udp_socket(family, local_address, None, reuse_port) + + +async def create_connected_udp_socket( + remote_host: IPAddressType, remote_port: int, *, + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + local_host: Optional[IPAddressType] = None, local_port: int = 0, reuse_port: bool = False +) -> ConnectedUDPSocket: + """ + Create a connected UDP socket. 
+ + Connected UDP sockets can only communicate with the specified remote host/port, and any packets + sent from other sources are dropped. + + :param remote_host: remote host to set as the default target + :param remote_port: port on the remote host to set as the default target + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from + ``local_host`` or ``remote_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port + (not supported on Windows) + :return: a connected UDP socket + + """ + local_address = None + if local_host: + gai_res = await getaddrinfo(str(local_host), local_port, family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + + gai_res = await getaddrinfo(str(remote_host), remote_port, family=family, + type=socket.SOCK_DGRAM) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + remote_address = gai_res[0][-1] + + return await get_asynclib().create_udp_socket(family, local_address, remote_address, + reuse_port) + + +async def getaddrinfo(host: Union[bytearray, bytes, str], port: Union[str, int, None], *, + family: Union[int, AddressFamily] = 0, type: Union[int, SocketKind] = 0, + proto: int = 0, flags: int = 0) -> GetAddrInfoReturnType: + """ + Look up a numeric IP address given a host name. + + Internationalized domain names are translated according to the (non-transitional) IDNA 2008 + standard. + + .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of + (host, port), unlike what :func:`socket.getaddrinfo` does. + + :param host: host name + :param port: port number + :param family: socket family (`'AF_INET``, ...) + :param type: socket type (``SOCK_STREAM``, ...) + :param proto: protocol number + :param flags: flags to pass to upstream ``getaddrinfo()`` + :return: list of tuples containing (family, type, proto, canonname, sockaddr) + + .. seealso:: :func:`socket.getaddrinfo` + + """ + # Handle unicode hostnames + if isinstance(host, str): + try: + encoded_host = host.encode('ascii') + except UnicodeEncodeError: + import idna + encoded_host = idna.encode(host, uts46=True) + else: + encoded_host = host + + gai_res = await get_asynclib().getaddrinfo(encoded_host, port, family=family, type=type, + proto=proto, flags=flags) + return [(family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr)) + for family, type, proto, canonname, sockaddr in gai_res] + + +def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[Tuple[str, str]]: + """ + Look up the host name of an IP address. + + :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4) + :param flags: flags to pass to upstream ``getnameinfo()`` + :return: a tuple of (host name, service name) + + .. seealso:: :func:`socket.getnameinfo` + + """ + return get_asynclib().getnameinfo(sockaddr, flags) + + +def wait_socket_readable(sock: socket.socket) -> Awaitable[None]: + """ + Wait until the given socket has data to be read. + + This does **NOT** work on Windows when using the asyncio backend with a proactor event loop + (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher level + constructs like socket streams! 
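+
+    A hedged sketch (``sock`` stands in for a non-blocking socket you manage yourself)::
+
+        await anyio.wait_socket_readable(sock)
+        data = sock.recv(4096)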
+ + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become readable + + """ + return get_asynclib().wait_socket_readable(sock) + + +def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: + """ + Wait until the given socket can be written to. + + This does **NOT** work on Windows when using the asyncio backend with a proactor event loop + (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher level + constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become writable + + """ + return get_asynclib().wait_socket_writable(sock) + + +# +# Private API +# + +def convert_ipv6_sockaddr( + sockaddr: Union[Tuple[str, int, int, int], Tuple[str, int]] +) -> Tuple[str, int]: + """ + Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. + + If the scope ID is nonzero, it is added to the address, separated with ``%``. + Otherwise the flow id and scope id are simply cut off from the tuple. + Any other kinds of socket addresses are returned as-is. + + :param sockaddr: the result of :meth:`~socket.socket.getsockname` + :return: the converted socket address + + """ + # This is more complicated than it should be because of MyPy + if isinstance(sockaddr, tuple) and len(sockaddr) == 4: + host, port, flowinfo, scope_id = cast(Tuple[str, int, int, int], sockaddr) + if scope_id: + # Add scope_id to the address + return f"{host}%{scope_id}", port + else: + return host, port + else: + return cast(Tuple[str, int], sockaddr) diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_streams.py b/venv/lib/python3.10/site-packages/anyio/_core/_streams.py new file mode 100644 index 0000000..f43875c --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_streams.py @@ -0,0 +1,42 @@ +import math +from typing import Any, Optional, Tuple, Type, TypeVar, overload + +from ..streams.memory import ( + MemoryObjectReceiveStream, MemoryObjectSendStream, MemoryObjectStreamState) + +T_Item = TypeVar('T_Item') + + +@overload +def create_memory_object_stream( + max_buffer_size: float, item_type: Type[T_Item] +) -> Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: + ... + + +@overload +def create_memory_object_stream( + max_buffer_size: float = 0 +) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]: + ... + + +def create_memory_object_stream( + max_buffer_size: float = 0, item_type: Optional[Type[T_Item]] = None +) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]: + """ + Create a memory object stream. 
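+
+    An illustrative sketch (the buffer size of 10 is chosen arbitrarily)::
+
+        send_stream, receive_stream = anyio.create_memory_object_stream(10)
+        await send_stream.send('hello')
+        assert await receive_stream.receive() == 'hello'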
+
+    :param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking
+    :param item_type: type of item, for marking the streams with the right generic type for
+        static typing (not used at run time)
+    :return: a tuple of (send stream, receive stream)
+
+    """
+    if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):
+        raise ValueError('max_buffer_size must be either an integer or math.inf')
+    if max_buffer_size < 0:
+        raise ValueError('max_buffer_size cannot be negative')
+
+    state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size)
+    return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)
diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_subprocesses.py b/venv/lib/python3.10/site-packages/anyio/_core/_subprocesses.py
new file mode 100644
index 0000000..f4ae139
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/anyio/_core/_subprocesses.py
@@ -0,0 +1,99 @@
+from io import BytesIO
+from os import PathLike
+from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess
+from typing import AsyncIterable, List, Mapping, Optional, Sequence, Union, cast
+
+from ..abc import Process
+from ._eventloop import get_asynclib
+from ._tasks import create_task_group
+
+
+async def run_process(command: Union[str, Sequence[str]], *, input: Optional[bytes] = None,
+                      stdout: int = PIPE, stderr: int = PIPE, check: bool = True,
+                      cwd: Union[str, bytes, 'PathLike[str]', None] = None,
+                      env: Optional[Mapping[str, str]] = None, start_new_session: bool = False,
+                      ) -> 'CompletedProcess[bytes]':
+    """
+    Run an external command in a subprocess and wait until it completes.
+
+    .. seealso:: :func:`subprocess.run`
+
+    :param command: either a string to pass to the shell, or an iterable of strings containing the
+        executable name or path and its arguments
+    :param input: bytes passed to the standard input of the subprocess
+    :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL`
+    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or
+        :data:`subprocess.STDOUT`
+    :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process
+        terminates with a return code other than 0
+    :param cwd: If not ``None``, change the working directory to this before running the command
+    :param env: if not ``None``, this mapping replaces the inherited environment variables from the
+        parent process
+    :param start_new_session: if ``True``, the setsid() system call will be made in the child
+        process prior to the execution of the subprocess. 
(POSIX only) + :return: an object representing the completed process + :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a + nonzero return code + + """ + async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: + buffer = BytesIO() + async for chunk in stream: + buffer.write(chunk) + + stream_contents[index] = buffer.getvalue() + + async with await open_process(command, stdin=PIPE if input else DEVNULL, stdout=stdout, + stderr=stderr, cwd=cwd, env=env, + start_new_session=start_new_session) as process: + stream_contents: List[Optional[bytes]] = [None, None] + try: + async with create_task_group() as tg: + if process.stdout: + tg.start_soon(drain_stream, process.stdout, 0) + if process.stderr: + tg.start_soon(drain_stream, process.stderr, 1) + if process.stdin and input: + await process.stdin.send(input) + await process.stdin.aclose() + + await process.wait() + except BaseException: + process.kill() + raise + + output, errors = stream_contents + if check and process.returncode != 0: + raise CalledProcessError(cast(int, process.returncode), command, output, errors) + + return CompletedProcess(command, cast(int, process.returncode), output, errors) + + +async def open_process(command: Union[str, Sequence[str]], *, stdin: int = PIPE, + stdout: int = PIPE, stderr: int = PIPE, + cwd: Union[str, bytes, 'PathLike[str]', None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False) -> Process: + """ + Start an external command in a subprocess. + + .. seealso:: :class:`subprocess.Popen` + + :param command: either a string to pass to the shell, or an iterable of strings containing the + executable name or path and its arguments + :param stdin: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL` + :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or + :data:`subprocess.STDOUT` + :param cwd: If not ``None``, the working directory is changed before executing + :param env: If env is not ``None``, it must be a mapping that defines the environment + variables for the new process + :param start_new_session: if ``true`` the setsid() system call will be made in the child + process prior to the execution of the subprocess. 
(POSIX only) + :return: an asynchronous process object + + """ + shell = isinstance(command, str) + return await get_asynclib().open_process(command, shell=shell, stdin=stdin, stdout=stdout, + stderr=stderr, cwd=cwd, env=env, + start_new_session=start_new_session) diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_synchronization.py b/venv/lib/python3.10/site-packages/anyio/_core/_synchronization.py new file mode 100644 index 0000000..d75afed --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_synchronization.py @@ -0,0 +1,566 @@ +from collections import deque +from dataclasses import dataclass +from types import TracebackType +from typing import Deque, Optional, Tuple, Type +from warnings import warn + +from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled +from ._compat import DeprecatedAwaitable +from ._eventloop import get_asynclib +from ._exceptions import BusyResourceError, WouldBlock +from ._tasks import CancelScope +from ._testing import TaskInfo, get_current_task + + +@dataclass(frozen=True) +class EventStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait` + """ + + tasks_waiting: int + + +@dataclass(frozen=True) +class CapacityLimiterStatistics: + """ + :ivar int borrowed_tokens: number of tokens currently borrowed by tasks + :ivar float total_tokens: total number of available tokens + :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from this + limiter + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.CapacityLimiter.acquire` or + :meth:`~.CapacityLimiter.acquire_on_behalf_of` + """ + + borrowed_tokens: int + total_tokens: float + borrowers: Tuple[object, ...] + tasks_waiting: int + + +@dataclass(frozen=True) +class LockStatistics: + """ + :ivar bool locked: flag indicating if this lock is locked or not + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the lock is not + held by any task) + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` + """ + + locked: bool + owner: Optional[TaskInfo] + tasks_waiting: int + + +@dataclass(frozen=True) +class ConditionStatistics: + """ + :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying :class:`~.Lock` + """ + + tasks_waiting: int + lock_statistics: LockStatistics + + +@dataclass(frozen=True) +class SemaphoreStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` + + """ + tasks_waiting: int + + +class Event: + def __new__(cls) -> 'Event': + return get_asynclib().Event() + + def set(self) -> DeprecatedAwaitable: + """Set the flag, notifying all listeners.""" + raise NotImplementedError + + def is_set(self) -> bool: + """Return ``True`` if the flag is set, ``False`` if not.""" + raise NotImplementedError + + async def wait(self) -> None: + """ + Wait until the flag has been set. + + If the flag has already been set when this method is called, it returns immediately. 
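+
+        A hedged sketch of the usual handshake (the two cooperating tasks are implied)::
+
+            event = anyio.Event()
+            # one task eventually calls:  event.set()
+            # another task blocks here until then:
+            await event.wait()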
+ + """ + raise NotImplementedError + + def statistics(self) -> EventStatistics: + """Return statistics about the current state of this event.""" + raise NotImplementedError + + +class Lock: + _owner_task: Optional[TaskInfo] = None + + def __init__(self) -> None: + self._waiters: Deque[Tuple[TaskInfo, Event]] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + task = get_current_task() + event = Event() + token = task, event + self._waiters.append(token) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(token) + elif self._owner_task == task: + self.release() + + raise + + assert self._owner_task == task + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. + + :raises ~WouldBlock: if the operation would block + + """ + task = get_current_task() + if self._owner_task == task: + raise RuntimeError('Attempted to acquire an already held Lock') + + if self._owner_task is not None: + raise WouldBlock + + self._owner_task = task + + def release(self) -> DeprecatedAwaitable: + """Release the lock.""" + if self._owner_task != get_current_task(): + raise RuntimeError('The current task is not holding this lock') + + if self._waiters: + self._owner_task, event = self._waiters.popleft() + event.set() + else: + del self._owner_task + + return DeprecatedAwaitable(self.release) + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + return self._owner_task is not None + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + """ + return LockStatistics(self.locked(), self._owner_task, len(self._waiters)) + + +class Condition: + _owner_task: Optional[TaskInfo] = None + + def __init__(self, lock: Optional[Lock] = None): + self._lock = lock or Lock() + self._waiters: Deque[Event] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.release() + + def _check_acquired(self) -> None: + if self._owner_task != get_current_task(): + raise RuntimeError('The current task is not holding the underlying lock') + + async def acquire(self) -> None: + """Acquire the underlying lock.""" + await self._lock.acquire() + self._owner_task = get_current_task() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
+ + :raises ~WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + self._owner_task = get_current_task() + + def release(self) -> DeprecatedAwaitable: + """Release the underlying lock.""" + self._lock.release() + return DeprecatedAwaitable(self.release) + + def locked(self) -> bool: + """Return True if the lock is set.""" + return self._lock.locked() + + def notify(self, n: int = 1) -> None: + """Notify exactly n listeners.""" + self._check_acquired() + for _ in range(n): + try: + event = self._waiters.popleft() + except IndexError: + break + + event.set() + + def notify_all(self) -> None: + """Notify all the listeners.""" + self._check_acquired() + for event in self._waiters: + event.set() + + self._waiters.clear() + + async def wait(self) -> None: + """Wait for a notification.""" + await checkpoint() + event = Event() + self._waiters.append(event) + self.release() + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + + raise + finally: + with CancelScope(shield=True): + await self.acquire() + + def statistics(self) -> ConditionStatistics: + """ + Return statistics about the current state of this condition. + + .. versionadded:: 3.0 + """ + return ConditionStatistics(len(self._waiters), self._lock.statistics()) + + +class Semaphore: + def __init__(self, initial_value: int, *, max_value: Optional[int] = None): + if not isinstance(initial_value, int): + raise TypeError('initial_value must be an integer') + if initial_value < 0: + raise ValueError('initial_value must be >= 0') + if max_value is not None: + if not isinstance(max_value, int): + raise TypeError('max_value must be an integer or None') + if max_value < initial_value: + raise ValueError('max_value must be equal to or higher than initial_value') + + self._value = initial_value + self._max_value = max_value + self._waiters: Deque[Event] = deque() + + async def __aenter__(self) -> 'Semaphore': + await self.acquire() + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.release() + + async def acquire(self) -> None: + """Decrement the semaphore value, blocking if necessary.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + event = Event() + self._waiters.append(event) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + else: + self.release() + + raise + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. + + :raises ~WouldBlock: if the operation would block + + """ + if self._value == 0: + raise WouldBlock + + self._value -= 1 + + def release(self) -> DeprecatedAwaitable: + """Increment the semaphore value.""" + if self._max_value is not None and self._value == self._max_value: + raise ValueError('semaphore released too many times') + + if self._waiters: + self._waiters.popleft().set() + else: + self._value += 1 + + return DeprecatedAwaitable(self.release) + + @property + def value(self) -> int: + """The current value of the semaphore.""" + return self._value + + @property + def max_value(self) -> Optional[int]: + """The maximum value of the semaphore.""" + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + """ + Return statistics about the current state of this semaphore. + + .. 
versionadded:: 3.0 + """ + return SemaphoreStatistics(len(self._waiters)) + + +class CapacityLimiter: + def __new__(cls, total_tokens: float) -> 'CapacityLimiter': + return get_asynclib().CapacityLimiter(total_tokens) + + async def __aenter__(self) -> None: + raise NotImplementedError + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + raise NotImplementedError + + @property + def total_tokens(self) -> float: + """ + The total number of tokens available for borrowing. + + This is a read-write property. If the total number of tokens is increased, the + proportionate number of tasks waiting on this limiter will be granted their tokens. + + .. versionchanged:: 3.0 + The property is now writable. + + """ + raise NotImplementedError + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + raise NotImplementedError + + async def set_total_tokens(self, value: float) -> None: + warn('CapacityLimiter.set_total_tokens has been deprecated. Set the value of the' + '"total_tokens" attribute directly.', DeprecationWarning) + self.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + """The number of tokens that have currently been borrowed.""" + raise NotImplementedError + + @property + def available_tokens(self) -> float: + """The number of tokens currently available to be borrowed""" + raise NotImplementedError + + def acquire_nowait(self) -> DeprecatedAwaitable: + """ + Acquire a token for the current task without waiting for one to become available. + + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + """ + Acquire a token without waiting for one to become available. + + :param borrower: the entity borrowing a token + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + async def acquire(self) -> None: + """ + Acquire a token for the current task, waiting if necessary for one to become available. + + """ + raise NotImplementedError + + async def acquire_on_behalf_of(self, borrower: object) -> None: + """ + Acquire a token, waiting if necessary for one to become available. + + :param borrower: the entity borrowing a token + + """ + raise NotImplementedError + + def release(self) -> None: + """ + Release the token held by the current task. + :raises RuntimeError: if the current task has not borrowed a token from this limiter. + + """ + raise NotImplementedError + + def release_on_behalf_of(self, borrower: object) -> None: + """ + Release the token held by the given borrower. + + :raises RuntimeError: if the borrower has not borrowed a token from this limiter. + + """ + raise NotImplementedError + + def statistics(self) -> CapacityLimiterStatistics: + """ + Return statistics about the current state of this limiter. + + .. versionadded:: 3.0 + + """ + raise NotImplementedError + + +def create_lock() -> Lock: + """ + Create an asynchronous lock. + + :return: a lock object + + .. deprecated:: 3.0 + Use :class:`~Lock` directly. + + """ + warn('create_lock() is deprecated -- use Lock() directly', DeprecationWarning) + return Lock() + + +def create_condition(lock: Optional[Lock] = None) -> Condition: + """ + Create an asynchronous condition. + + :param lock: the lock to base the condition object on + :return: a condition object + + .. 
deprecated:: 3.0 + Use :class:`~Condition` directly. + + """ + warn('create_condition() is deprecated -- use Condition() directly', DeprecationWarning) + return Condition(lock=lock) + + +def create_event() -> Event: + """ + Create an asynchronous event object. + + :return: an event object + + .. deprecated:: 3.0 + Use :class:`~Event` directly. + + """ + warn('create_event() is deprecated -- use Event() directly', DeprecationWarning) + return get_asynclib().Event() + + +def create_semaphore(value: int, *, max_value: Optional[int] = None) -> Semaphore: + """ + Create an asynchronous semaphore. + + :param value: the semaphore's initial value + :param max_value: if set, makes this a "bounded" semaphore that raises :exc:`ValueError` if the + semaphore's value would exceed this number + :return: a semaphore object + + .. deprecated:: 3.0 + Use :class:`~Semaphore` directly. + + """ + warn('create_semaphore() is deprecated -- use Semaphore() directly', DeprecationWarning) + return Semaphore(value, max_value=max_value) + + +def create_capacity_limiter(total_tokens: float) -> CapacityLimiter: + """ + Create a capacity limiter. + + :param total_tokens: the total number of tokens available for borrowing (can be an integer or + :data:`math.inf`) + :return: a capacity limiter object + + .. deprecated:: 3.0 + Use :class:`~CapacityLimiter` directly. + + """ + warn('create_capacity_limiter() is deprecated -- use CapacityLimiter() directly', + DeprecationWarning) + return get_asynclib().CapacityLimiter(total_tokens) + + +class ResourceGuard: + __slots__ = 'action', '_guarded' + + def __init__(self, action: str): + self.action = action + self._guarded = False + + def __enter__(self) -> None: + if self._guarded: + raise BusyResourceError(self.action) + + self._guarded = True + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + self._guarded = False + return None diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_tasks.py b/venv/lib/python3.10/site-packages/anyio/_core/_tasks.py new file mode 100644 index 0000000..4db1cc4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_tasks.py @@ -0,0 +1,158 @@ +import math +from types import TracebackType +from typing import Optional, Type +from warnings import warn + +from ..abc._tasks import TaskGroup, TaskStatus +from ._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable, DeprecatedAwaitableFloat +from ._eventloop import get_asynclib + + +class _IgnoredTaskStatus(TaskStatus): + def started(self, value: object = None) -> None: + pass + + +TASK_STATUS_IGNORED = _IgnoredTaskStatus() + + +class CancelScope(DeprecatedAsyncContextManager['CancelScope']): + """ + Wraps a unit of work that can be made separately cancellable. + + :param deadline: The time (clock value) when this scope is cancelled automatically + :param shield: ``True`` to shield the cancel scope from external cancellation + """ + + def __new__(cls, *, deadline: float = math.inf, shield: bool = False) -> 'CancelScope': + return get_asynclib().CancelScope(shield=shield, deadline=deadline) + + def cancel(self) -> DeprecatedAwaitable: + """Cancel this scope immediately.""" + raise NotImplementedError + + @property + def deadline(self) -> float: + """ + The time (clock value) when this scope is cancelled automatically. + + Will be ``float('inf')`` if no timeout has been set. 
+ + """ + raise NotImplementedError + + @deadline.setter + def deadline(self, value: float) -> None: + raise NotImplementedError + + @property + def cancel_called(self) -> bool: + """``True`` if :meth:`cancel` has been called.""" + raise NotImplementedError + + @property + def shield(self) -> bool: + """ + ``True`` if this scope is shielded from external cancellation. + + While a scope is shielded, it will not receive cancellations from outside. + + """ + raise NotImplementedError + + @shield.setter + def shield(self, value: bool) -> None: + raise NotImplementedError + + def __enter__(self) -> 'CancelScope': + raise NotImplementedError + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + raise NotImplementedError + + +def open_cancel_scope(*, shield: bool = False) -> CancelScope: + """ + Open a cancel scope. + + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a cancel scope + + .. deprecated:: 3.0 + Use :class:`~CancelScope` directly. + + """ + warn('open_cancel_scope() is deprecated -- use CancelScope() directly', DeprecationWarning) + return get_asynclib().CancelScope(shield=shield) + + +class FailAfterContextManager(DeprecatedAsyncContextManager[CancelScope]): + def __init__(self, cancel_scope: CancelScope): + self._cancel_scope = cancel_scope + + def __enter__(self) -> CancelScope: + return self._cancel_scope.__enter__() + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + retval = self._cancel_scope.__exit__(exc_type, exc_val, exc_tb) + if self._cancel_scope.cancel_called: + raise TimeoutError + + return retval + + +def fail_after(delay: Optional[float], shield: bool = False) -> FailAfterContextManager: + """ + Create a context manager which raises a :class:`TimeoutError` if does not finish in time. + + :param delay: maximum allowed time (in seconds) before raising the exception, or ``None`` to + disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a context manager that yields a cancel scope + :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.abc.CancelScope`\\] + + """ + deadline = (get_asynclib().current_time() + delay) if delay is not None else math.inf + cancel_scope = get_asynclib().CancelScope(deadline=deadline, shield=shield) + return FailAfterContextManager(cancel_scope) + + +def move_on_after(delay: Optional[float], shield: bool = False) -> CancelScope: + """ + Create a cancel scope with a deadline that expires after the given delay. + + :param delay: maximum allowed time (in seconds) before exiting the context block, or ``None`` + to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a cancel scope + + """ + deadline = (get_asynclib().current_time() + delay) if delay is not None else math.inf + return get_asynclib().CancelScope(deadline=deadline, shield=shield) + + +def current_effective_deadline() -> DeprecatedAwaitableFloat: + """ + Return the nearest deadline among all the cancel scopes effective for the current task. 
+ + :return: a clock value from the event loop's internal clock (``float('inf')`` if there is no + deadline in effect) + :rtype: float + + """ + return DeprecatedAwaitableFloat(get_asynclib().current_effective_deadline(), + current_effective_deadline) + + +def create_task_group() -> 'TaskGroup': + """ + Create a task group. + + :return: a task group + + """ + return get_asynclib().TaskGroup() diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_testing.py b/venv/lib/python3.10/site-packages/anyio/_core/_testing.py new file mode 100644 index 0000000..d977eff --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_testing.py @@ -0,0 +1,75 @@ +from typing import Any, Awaitable, Generator, Optional, Union + +from ._compat import DeprecatedAwaitableList, _warn_deprecation +from ._eventloop import get_asynclib + + +class TaskInfo: + """ + Represents an asynchronous task. + + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = '_name', 'id', 'parent_id', 'name', 'coro' + + def __init__(self, id: int, parent_id: Optional[int], name: Optional[str], + coro: Union[Generator, Awaitable[Any]]): + func = get_current_task + self._name = f'{func.__module__}.{func.__qualname__}' + self.id: int = id + self.parent_id: Optional[int] = parent_id + self.name: Optional[str] = name + self.coro: Union[Generator, Awaitable[Any]] = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f'{self.__class__.__name__}(id={self.id!r}, name={self.name!r})' + + def __await__(self) -> Generator[None, None, "TaskInfo"]: + _warn_deprecation(self) + if False: + yield + + return self + + def _unwrap(self) -> 'TaskInfo': + return self + + +def get_current_task() -> TaskInfo: + """ + Return the current task. + + :return: a representation of the current task + + """ + return get_asynclib().get_current_task() + + +def get_running_tasks() -> DeprecatedAwaitableList[TaskInfo]: + """ + Return a list of running tasks in the current event loop. + + :return: a list of task info objects + + """ + tasks = get_asynclib().get_running_tasks() + return DeprecatedAwaitableList(tasks, func=get_running_tasks) + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_asynclib().wait_all_tasks_blocked() diff --git a/venv/lib/python3.10/site-packages/anyio/_core/_typedattr.py b/venv/lib/python3.10/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 0000000..797287d --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,79 @@ +import sys +from typing import Any, Callable, Dict, Mapping, TypeVar, Union, overload + +from ._exceptions import TypedAttributeLookupError + +if sys.version_info >= (3, 8): + from typing import final +else: + from typing_extensions import final + +T_Attr = TypeVar('T_Attr') +T_Default = TypeVar('T_Default') +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. 
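# Illustrative sketch (not part of the diff): create_task_group() together
# with the TaskInfo introspection helpers above, via the public anyio API.
import anyio
from anyio import get_current_task

async def child() -> None:
    info = get_current_task()  # a TaskInfo instance
    print(info.id, info.name)

async def main() -> None:
    async with anyio.create_task_group() as tg:
        tg.start_soon(child, name='demo-child')
    # get_running_tasks() and wait_all_tasks_blocked() follow the same pattern

anyio.run(main)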
+ + Checks that every public attribute of every subclass has a type annotation. + """ + + def __init_subclass__(cls) -> None: + annotations: Dict[str, Any] = getattr(cls, '__annotations__', {}) + for attrname in dir(cls): + if not attrname.startswith('_') and attrname not in annotations: + raise TypeError(f'Attribute {attrname!r} is missing its type annotation') + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding values. + + If the provider wraps another provider, the attributes from that wrapper should also be + included in the returned mapping (but the wrapper may override the callables from the + wrapped instance). + + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: + ... + + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> Union[T_Attr, T_Default]: + ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. + + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to look for + :param default: the value that should be returned if no value is found for the attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default value was + given + + """ + try: + return self.extra_attributes[attribute]() + except KeyError: + if default is undefined: + raise TypedAttributeLookupError('Attribute not found') from None + else: + return default diff --git a/venv/lib/python3.10/site-packages/anyio/abc/__init__.py b/venv/lib/python3.10/site-packages/anyio/abc/__init__.py new file mode 100644 index 0000000..592ef0e --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/abc/__init__.py @@ -0,0 +1,37 @@ +__all__ = ('AsyncResource', 'IPAddressType', 'IPSockAddrType', 'SocketAttribute', 'SocketStream', + 'SocketListener', 'UDPSocket', 'UNIXSocketStream', 'UDPPacketType', + 'ConnectedUDPSocket', 'UnreliableObjectReceiveStream', 'UnreliableObjectSendStream', + 'UnreliableObjectStream', 'ObjectReceiveStream', 'ObjectSendStream', 'ObjectStream', + 'ByteReceiveStream', 'ByteSendStream', 'ByteStream', 'AnyUnreliableByteReceiveStream', + 'AnyUnreliableByteSendStream', 'AnyUnreliableByteStream', 'AnyByteReceiveStream', + 'AnyByteSendStream', 'AnyByteStream', 'Listener', 'Process', 'Event', + 'Condition', 'Lock', 'Semaphore', 'CapacityLimiter', 'CancelScope', 'TaskGroup', + 'TaskStatus', 'TestRunner', 'BlockingPortal') + +from typing import Any + +from ._resources import AsyncResource +from ._sockets import ( + ConnectedUDPSocket, IPAddressType, IPSockAddrType, SocketAttribute, SocketListener, + SocketStream, UDPPacketType, UDPSocket, UNIXSocketStream) +from ._streams import ( + AnyByteReceiveStream, AnyByteSendStream, AnyByteStream, AnyUnreliableByteReceiveStream, + AnyUnreliableByteSendStream, AnyUnreliableByteStream, ByteReceiveStream, ByteSendStream, + ByteStream, Listener, ObjectReceiveStream, ObjectSendStream, ObjectStream, + UnreliableObjectReceiveStream, UnreliableObjectSendStream, UnreliableObjectStream) +from ._subprocesses import Process +from ._tasks import TaskGroup, TaskStatus +from ._testing import TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import 
CapacityLimiter, Condition, Event, Lock, Semaphore +from .._core._tasks import CancelScope +from ..from_thread import BlockingPortal + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, '__module__', '').startswith('anyio.abc.'): + value.__module__ = __name__ diff --git a/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..9ad87f6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_resources.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_resources.cpython-310.pyc new file mode 100644 index 0000000..75d5465 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_resources.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_sockets.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_sockets.cpython-310.pyc new file mode 100644 index 0000000..856a74c Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_sockets.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_streams.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_streams.cpython-310.pyc new file mode 100644 index 0000000..be7034a Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_streams.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-310.pyc new file mode 100644 index 0000000..d257476 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_tasks.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_tasks.cpython-310.pyc new file mode 100644 index 0000000..5e2f408 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_tasks.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_testing.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_testing.cpython-310.pyc new file mode 100644 index 0000000..d4b683c Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_testing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/abc/_resources.py b/venv/lib/python3.10/site-packages/anyio/abc/_resources.py new file mode 100644 index 0000000..4594e6e --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/abc/_resources.py @@ -0,0 +1,26 @@ +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import Optional, Type, TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): + """ + Abstract base class for all closeable asynchronous resources. + + Works as an asynchronous context manager which returns the instance itself on enter, and calls + :meth:`aclose` on exit. 
+ """ + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/venv/lib/python3.10/site-packages/anyio/abc/_sockets.py b/venv/lib/python3.10/site-packages/anyio/abc/_sockets.py new file mode 100644 index 0000000..a05151e --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,156 @@ +import socket +from abc import abstractmethod +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from types import TracebackType +from typing import ( + Any, AsyncContextManager, Callable, Collection, Dict, List, Mapping, Optional, Tuple, Type, + TypeVar, Union) + +from .._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute +from ._streams import ByteStream, Listener, T_Stream, UnreliableObjectStream +from ._tasks import TaskGroup + +IPAddressType = Union[str, IPv4Address, IPv6Address] +IPSockAddrType = Tuple[str, int] +SockAddrType = Union[IPSockAddrType, str] +UDPPacketType = Tuple[bytes, IPSockAddrType] +T_Retval = TypeVar('T_Retval') + + +class _NullAsyncContextManager: + async def __aenter__(self) -> None: + pass + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return None + + +class SocketAttribute(TypedAttributeSet): + #: the address family of the underlying socket + family: AddressFamily = typed_attribute() + #: the local socket address of the underlying socket + local_address: SockAddrType = typed_attribute() + #: for IP addresses, the local port the underlying socket is bound to + local_port: int = typed_attribute() + #: the underlying stdlib socket object + raw_socket: socket.socket = typed_attribute() + #: the remote address the underlying socket is connected to + remote_address: SockAddrType = typed_attribute() + #: for IP addresses, the remote port the underlying socket is connected to + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: Dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert(self._raw_socket.getsockname()), + SocketAttribute.raw_socket: lambda: self._raw_socket + } + try: + peername: Optional[Tuple[str, int]] = convert(self._raw_socket.getpeername()) + except OSError: + peername = None + + # Provide the remote address for connected sockets + if peername is not None: + attributes[SocketAttribute.remote_address] = lambda: peername + + # Provide local and remote ports for IP based sockets + if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): + attributes[SocketAttribute.local_port] = lambda: self._raw_socket.getsockname()[1] + if peername is not None: + remote_port = peername[1] + attributes[SocketAttribute.remote_port] = lambda: remote_port + + return attributes + + @property + @abstractmethod + def _raw_socket(self) -> socket.socket: + pass + + +class SocketStream(ByteStream, _SocketProvider): + """ + Transports bytes over a socket. 
+ + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + +class UNIXSocketStream(SocketStream): + @abstractmethod + async def send_fds(self, message: bytes, fds: Collection[Union[int, IOBase]]) -> None: + """ + Send file descriptors along with a message to the peer. + + :param message: a non-empty bytestring + :param fds: a collection of files (either numeric file descriptors or open file or socket + objects) + """ + + @abstractmethod + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + """ + Receive file descriptors along with a message from the peer. + + :param msglen: length of the message to expect from the peer + :param maxfds: maximum number of file descriptors to expect from the peer + :return: a tuple of (message, file descriptors) + """ + + +class SocketListener(Listener[SocketStream], _SocketProvider): + """ + Listens to incoming socket connections. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @abstractmethod + async def accept(self) -> SocketStream: + """Accept an incoming connection.""" + + async def serve(self, handler: Callable[[T_Stream], Any], + task_group: Optional[TaskGroup] = None) -> None: + from .. import create_task_group + + context_manager: AsyncContextManager + if task_group is None: + task_group = context_manager = create_task_group() + else: + # Can be replaced with AsyncExitStack once on py3.7+ + context_manager = _NullAsyncContextManager() + + async with context_manager: + while True: + stream = await self.accept() + task_group.start_soon(handler, stream) + + +class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider): + """ + Represents an unconnected UDP socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + async def sendto(self, data: bytes, host: str, port: int) -> None: + """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))).""" + return await self.send((data, (host, port))) + + +class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider): + """ + Represents a connected UDP socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ diff --git a/venv/lib/python3.10/site-packages/anyio/abc/_streams.py b/venv/lib/python3.10/site-packages/anyio/abc/_streams.py new file mode 100644 index 0000000..635b818 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/abc/_streams.py @@ -0,0 +1,187 @@ +from abc import abstractmethod +from typing import Any, Callable, Generic, Optional, TypeVar, Union + +from .._core._exceptions import EndOfStream +from .._core._typedattr import TypedAttributeProvider +from ._resources import AsyncResource +from ._tasks import TaskGroup + +T_Item = TypeVar('T_Item') +T_Stream = TypeVar('T_Stream') + + +class UnreliableObjectReceiveStream(Generic[T_Item], AsyncResource, TypedAttributeProvider): + """ + An interface for receiving objects. + + This interface makes no guarantees that the received messages arrive in the order in which they + were sent, or that no messages are missed. + + Asynchronously iterating over objects of this type will yield objects matching the given type + parameter. + """ + + def __aiter__(self) -> "UnreliableObjectReceiveStream[T_Item]": + return self + + async def __anext__(self) -> T_Item: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self) -> T_Item: + """ + Receive the next item.
+ + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream(Generic[T_Item], AsyncResource, TypedAttributeProvider): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the recipient(s) in the + same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_Item) -> None: + """ + Send an item to the peer(s). + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream(UnreliableObjectReceiveStream[T_Item], + UnreliableObjectSendStream[T_Item]): + """ + A bidirectional message stream which does not guarantee the order or reliability of message + delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_Item]): + """ + A receive message stream which guarantees that messages are received in the same order in + which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_Item]): + """ + A send message stream which guarantees that messages are delivered in the same order in which + they were sent, without missing any messages in the middle. + """ + + +class ObjectStream(ObjectReceiveStream[T_Item], ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item]): + """ + A bidirectional message stream which guarantees the order and reliability of message delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this method. + This method is idempotent (does nothing on successive calls). + """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more than + 65536 bytes. + """ + + def __aiter__(self) -> 'ByteReceiveStream': + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. note:: Implementors of this interface should not return an empty :class:`bytes` object, + and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. + + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this method. 
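# Illustrative sketch (not part of the diff): anyio's memory object streams
# implement the ObjectSendStream/ObjectReceiveStream interfaces above.
import anyio

async def main() -> None:
    send, receive = anyio.create_memory_object_stream(10)
    async with send, receive:  # both are AsyncResources
        await send.send('hello')
        print(await receive.receive())

anyio.run(main)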
+ This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream = Union[UnreliableObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] +#: Type alias for all bytes-oriented streams. +AnyByteStream = Union[ObjectStream[bytes], ByteStream] + + +class Listener(Generic[T_Stream], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve(self, handler: Callable[[T_Stream], Any], + task_group: Optional[TaskGroup] = None) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. + + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling each + accepted connection (if omitted, an ad-hoc task group will be created) + """ diff --git a/venv/lib/python3.10/site-packages/anyio/abc/_subprocesses.py b/venv/lib/python3.10/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 0000000..1e633fb --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,78 @@ +from abc import abstractmethod +from signal import Signals +from typing import Optional + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: Signals) -> None: + """ + Send a signal to the subprocess. + + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> Optional[int]: + """ + The return code of the process. If the process has not yet terminated, this will be + ``None``. 
+ """ + + @property + @abstractmethod + def stdin(self) -> Optional[ByteSendStream]: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> Optional[ByteReceiveStream]: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> Optional[ByteReceiveStream]: + """The stream for the standard error output of the process.""" diff --git a/venv/lib/python3.10/site-packages/anyio/abc/_tasks.py b/venv/lib/python3.10/site-packages/anyio/abc/_tasks.py new file mode 100644 index 0000000..bed02d8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,87 @@ +import typing +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import Any, Callable, Coroutine, Optional, Type, TypeVar +from warnings import warn + +if typing.TYPE_CHECKING: + from anyio._core._tasks import CancelScope + +T_Retval = TypeVar('T_Retval') + + +class TaskStatus(metaclass=ABCMeta): + @abstractmethod + def started(self, value: object = None) -> None: + """ + Signal that the task has started. + + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. + + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + """ + + cancel_scope: 'CancelScope' + + async def spawn(self, func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, name: object = None) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. deprecated:: 3.0 + Use :meth:`start_soon` instead. If your code needs AnyIO 2 compatibility, you + can keep using this until AnyIO 4. + + """ + warn('spawn() is deprecated -- use start_soon() (without the "await") instead', + DeprecationWarning) + self.start_soon(func, *args, name=name) + + @abstractmethod + def start_soon(self, func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, name: object = None) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def start(self, func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, name: object = None) -> object: + """ + Start a new task and wait until it signals for readiness. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling ``task_status.started()`` + + .. 
versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> 'TaskGroup': + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/venv/lib/python3.10/site-packages/anyio/abc/_testing.py b/venv/lib/python3.10/site-packages/anyio/abc/_testing.py new file mode 100644 index 0000000..2cc9822 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/abc/_testing.py @@ -0,0 +1,37 @@ +import types +from abc import ABCMeta, abstractmethod +from typing import Any, Awaitable, Callable, Dict, Optional, Type, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the same event + loop. + """ + + def __enter__(self) -> 'TestRunner': + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[types.TracebackType]) -> Optional[bool]: + self.close() + return None + + @abstractmethod + def close(self) -> None: + """Close the event loop.""" + + @abstractmethod + def call(self, func: Callable[..., Awaitable[_T]], + *args: object, **kwargs: Dict[str, Any]) -> _T: + """ + Call the given function within the backend's event loop. + + :param func: a callable returning an awaitable + :param args: positional arguments to call ``func`` with + :param kwargs: keyword arguments to call ``func`` with + :return: the return value of ``func`` + """ diff --git a/venv/lib/python3.10/site-packages/anyio/from_thread.py b/venv/lib/python3.10/site-packages/anyio/from_thread.py new file mode 100644 index 0000000..0dfcec9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/from_thread.py @@ -0,0 +1,416 @@ +import threading +from asyncio import iscoroutine +from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait +from contextlib import AbstractContextManager, contextmanager +from types import TracebackType +from typing import ( + Any, AsyncContextManager, Callable, ContextManager, Coroutine, Dict, Generator, Iterable, + Optional, Tuple, Type, TypeVar, Union, cast, overload) +from warnings import warn + +from ._core import _eventloop +from ._core._eventloop import get_asynclib, get_cancelled_exc_class, threadlocals +from ._core._synchronization import Event +from ._core._tasks import CancelScope, create_task_group +from .abc._tasks import TaskStatus + +T_Retval = TypeVar('T_Retval') +T_co = TypeVar('T_co') + + +def run(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object) -> T_Retval: + """ + Call a coroutine function from a worker thread. 
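# Illustrative sketch (not part of the diff): from_thread.run() is intended
# to be called from a worker thread started with anyio.to_thread.run_sync().
import anyio
from anyio import from_thread, to_thread

def blocking_work() -> None:
    # We are in a worker thread here; hop back into the event loop
    from_thread.run(anyio.sleep, 0.1)

async def main() -> None:
    await to_thread.run_sync(blocking_work)

anyio.run(main)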
+ + :param func: a coroutine function + :param args: positional arguments for the callable + :return: the return value of the coroutine function + + """ + try: + asynclib = threadlocals.current_async_module + except AttributeError: + raise RuntimeError('This function can only be run from an AnyIO worker thread') + + return asynclib.run_async_from_thread(func, *args) + + +def run_async_from_thread(func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object) -> T_Retval: + warn('run_async_from_thread() has been deprecated, use anyio.from_thread.run() instead', + DeprecationWarning) + return run(func, *args) + + +def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval: + """ + Call a function in the event loop thread from a worker thread. + + :param func: a callable + :param args: positional arguments for the callable + :return: the return value of the callable + + """ + try: + asynclib = threadlocals.current_async_module + except AttributeError: + raise RuntimeError('This function can only be run from an AnyIO worker thread') + + return asynclib.run_sync_from_thread(func, *args) + + +def run_sync_from_thread(func: Callable[..., T_Retval], *args: object) -> T_Retval: + warn('run_sync_from_thread() has been deprecated, use anyio.from_thread.run_sync() instead', + DeprecationWarning) + return run_sync(func, *args) + + +class _BlockingAsyncContextManager(AbstractContextManager): + _enter_future: Future + _exit_future: Future + _exit_event: Event + _exit_exc_info: Tuple[Optional[Type[BaseException]], Optional[BaseException], + Optional[TracebackType]] = (None, None, None) + + def __init__(self, async_cm: AsyncContextManager[T_co], portal: 'BlockingPortal'): + self._async_cm = async_cm + self._portal = portal + + async def run_async_cm(self) -> Optional[bool]: + try: + self._exit_event = Event() + value = await self._async_cm.__aenter__() + except BaseException as exc: + self._enter_future.set_exception(exc) + raise + else: + self._enter_future.set_result(value) + + try: + # Wait for the sync context manager to exit. + # This next statement can raise `get_cancelled_exc_class()` if + # something went wrong in a task group in this async context + # manager. + await self._exit_event.wait() + finally: + # In case of cancellation, it could be that we end up here before + # `_BlockingAsyncContextManager.__exit__` is called, and an + # `_exit_exc_info` has been set. 
+ result = await self._async_cm.__aexit__(*self._exit_exc_info) + return result + + def __enter__(self) -> T_co: + self._enter_future = Future() + self._exit_future = self._portal.start_task_soon(self.run_async_cm) + cm = self._enter_future.result() + return cast(T_co, cm) + + def __exit__(self, __exc_type: Optional[Type[BaseException]], + __exc_value: Optional[BaseException], + __traceback: Optional[TracebackType]) -> Optional[bool]: + self._exit_exc_info = __exc_type, __exc_value, __traceback + self._portal.call(self._exit_event.set) + return self._exit_future.result() + + +class _BlockingPortalTaskStatus(TaskStatus): + def __init__(self, future: Future): + self._future = future + + def started(self, value: object = None) -> None: + self._future.set_result(value) + + +class BlockingPortal: + """An object that lets external threads run code in an asynchronous event loop.""" + + def __new__(cls) -> 'BlockingPortal': + return get_asynclib().BlockingPortal() + + def __init__(self) -> None: + self._event_loop_thread_id: Optional[int] = threading.get_ident() + self._stop_event = Event() + self._task_group = create_task_group() + self._cancelled_exc_class = get_cancelled_exc_class() + + async def __aenter__(self) -> 'BlockingPortal': + await self._task_group.__aenter__() + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + await self.stop() + return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) + + def _check_running(self) -> None: + if self._event_loop_thread_id is None: + raise RuntimeError('This portal is not running') + if self._event_loop_thread_id == threading.get_ident(): + raise RuntimeError('This method cannot be called from the event loop thread') + + async def sleep_until_stopped(self) -> None: + """Sleep until :meth:`stop` is called.""" + await self._stop_event.wait() + + async def stop(self, cancel_remaining: bool = False) -> None: + """ + Signal the portal to shut down. + + This marks the portal as no longer accepting new calls and exits from + :meth:`sleep_until_stopped`. + + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` to let them + finish before returning + + """ + self._event_loop_thread_id = None + self._stop_event.set() + if cancel_remaining: + self._task_group.cancel_scope.cancel() + + async def _call_func(self, func: Callable, args: tuple, kwargs: Dict[str, Any], + future: Future) -> None: + def callback(f: Future) -> None: + if f.cancelled() and self._event_loop_thread_id not in (None, threading.get_ident()): + self.call(scope.cancel) + + try: + retval = func(*args, **kwargs) + if iscoroutine(retval): + with CancelScope() as scope: + if future.cancelled(): + scope.cancel() + else: + future.add_done_callback(callback) + + retval = await retval + except self._cancelled_exc_class: + future.cancel() + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + # Let base exceptions fall through + if not isinstance(exc, Exception): + raise + else: + if not future.cancelled(): + future.set_result(retval) + finally: + scope = None # type: ignore[assignment] + + def _spawn_task_from_thread(self, func: Callable, args: tuple, kwargs: Dict[str, Any], + name: object, future: Future) -> None: + """ + Spawn a new task using the given callable. + + Implementors must ensure that the future is resolved when the task finishes. 
+ + :param func: a callable + :param args: positional arguments to be passed to the callable + :param kwargs: keyword arguments to be passed to the callable + :param name: name of the task (will be coerced to a string if not ``None``) + :param future: a future that will resolve to the return value of the callable, or the + exception raised during its execution + + """ + raise NotImplementedError + + @overload + def call(self, func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object) -> T_Retval: + ... + + @overload + def call(self, func: Callable[..., T_Retval], *args: object) -> T_Retval: + ... + + def call(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object) -> T_Retval: + """ + Call the given function in the event loop thread. + + If the callable returns a coroutine object, it is awaited on. + + :param func: any callable + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + """ + return cast(T_Retval, self.start_task_soon(func, *args).result()) + + @overload + def spawn_task(self, func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + ... + + @overload + def spawn_task(self, func: Callable[..., T_Retval], + *args: object, name: object = None) -> "Future[T_Retval]": ... + + def spawn_task(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + """ + Start a task in the portal's task group. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the task completes + successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + .. versionadded:: 2.1 + .. deprecated:: 3.0 + Use :meth:`start_task_soon` instead. If your code needs AnyIO 2 compatibility, you + can keep using this until AnyIO 4. + + """ + warn('spawn_task() is deprecated -- use start_task_soon() instead', DeprecationWarning) + return self.start_task_soon(func, *args, name=name) # type: ignore[arg-type] + + @overload + def start_task_soon(self, func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + ... + + @overload + def start_task_soon(self, func: Callable[..., T_Retval], + *args: object, name: object = None) -> "Future[T_Retval]": ... + + def start_task_soon(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + """ + Start a task in the portal's task group. + + The task will be run inside a cancel scope which can be cancelled by cancelling the + returned future. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the task completes + successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + .. 
versionadded:: 3.0 + + """ + self._check_running() + f: Future = Future() + self._spawn_task_from_thread(func, args, {}, name, f) + return f + + def start_task(self, func: Callable[..., Coroutine[Any, Any, Any]], *args: object, + name: object = None) -> Tuple['Future[Any]', Any]: + """ + Start a task in the portal's task group and wait until it signals for readiness. + + This method works the same way as :meth:`TaskGroup.start`. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a tuple of (future, task_status_value) where the ``task_status_value`` is the + value passed to ``task_status.started()`` from within the target function + + .. versionadded:: 3.0 + + """ + def task_done(future: Future) -> None: + if not task_status_future.done(): + if future.cancelled(): + task_status_future.cancel() + elif future.exception(): + task_status_future.set_exception(future.exception()) + else: + exc = RuntimeError('Task exited without calling task_status.started()') + task_status_future.set_exception(exc) + + self._check_running() + task_status_future: Future = Future() + task_status = _BlockingPortalTaskStatus(task_status_future) + f: Future = Future() + f.add_done_callback(task_done) + self._spawn_task_from_thread(func, args, {'task_status': task_status}, name, f) + return f, task_status_future.result() + + def wrap_async_context_manager(self, cm: AsyncContextManager[T_co]) -> ContextManager[T_co]: + """ + Wrap an async context manager as a synchronous context manager via this portal. + + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping in the + middle until the synchronous context manager exits. + + :param cm: an asynchronous context manager + :return: a synchronous context manager + + .. versionadded:: 2.1 + + """ + return _BlockingAsyncContextManager(cm, self) + + +def create_blocking_portal() -> BlockingPortal: + """ + Create a portal for running functions in the event loop thread from external threads. + + Use this function in asynchronous code when you need to allow external threads access to the + event loop where your asynchronous code is currently running. + + .. deprecated:: 3.0 + Use :class:`.BlockingPortal` directly. + + """ + warn('create_blocking_portal() has been deprecated -- use anyio.from_thread.BlockingPortal() ' + 'directly', DeprecationWarning) + return BlockingPortal() + + +@contextmanager +def start_blocking_portal( + backend: str = 'asyncio', + backend_options: Optional[Dict[str, Any]] = None) -> Generator[BlockingPortal, Any, None]: + """ + Start a new event loop in a new thread and run a blocking portal in its main task. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + :return: a context manager that yields a blocking portal + + .. versionchanged:: 3.0 + Usage as a context manager is now required. 
+ + """ + async def run_portal() -> None: + async with BlockingPortal() as portal_: + if future.set_running_or_notify_cancel(): + future.set_result(portal_) + await portal_.sleep_until_stopped() + + future: Future[BlockingPortal] = Future() + with ThreadPoolExecutor(1) as executor: + run_future = executor.submit(_eventloop.run, run_portal, backend=backend, + backend_options=backend_options) + try: + wait(cast(Iterable[Future], [run_future, future]), return_when=FIRST_COMPLETED) + except BaseException: + future.cancel() + run_future.cancel() + raise + + if future.done(): + portal = future.result() + try: + yield portal + except BaseException: + portal.call(portal.stop, True) + raise + + portal.call(portal.stop, False) + + run_future.result() diff --git a/venv/lib/python3.10/site-packages/anyio/lowlevel.py b/venv/lib/python3.10/site-packages/anyio/lowlevel.py new file mode 100644 index 0000000..446e9e7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/lowlevel.py @@ -0,0 +1,160 @@ +import enum +import sys +from dataclasses import dataclass +from typing import Any, Dict, Generic, Set, TypeVar, Union, overload +from weakref import WeakKeyDictionary + +from ._core._eventloop import get_asynclib + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + +T = TypeVar('T') +D = TypeVar('D') + + +async def checkpoint() -> None: + """ + Check for cancellation and allow the scheduler to switch to another task. + + Equivalent to (but more efficient than):: + + await checkpoint_if_cancelled() + await cancel_shielded_checkpoint() + + .. versionadded:: 3.0 + + """ + await get_asynclib().checkpoint() + + +async def checkpoint_if_cancelled() -> None: + """ + Enter a checkpoint if the enclosing cancel scope has been cancelled. + + This does not allow the scheduler to switch to a different task. + + .. versionadded:: 3.0 + + """ + await get_asynclib().checkpoint_if_cancelled() + + +async def cancel_shielded_checkpoint() -> None: + """ + Allow the scheduler to switch to another task but without checking for cancellation. + + Equivalent to (but potentially more efficient than):: + + with CancelScope(shield=True): + await checkpoint() + + .. 
versionadded:: 3.0 + + """ + await get_asynclib().cancel_shielded_checkpoint() + + +def current_token() -> object: + """Return a backend-specific token object that can be used to get back to the event loop.""" + return get_asynclib().current_token() + + +_run_vars = WeakKeyDictionary() # type: WeakKeyDictionary[Any, Dict[str, Any]] +_token_wrappers: Dict[Any, '_TokenWrapper'] = {} + + +@dataclass(frozen=True) +class _TokenWrapper: + __slots__ = '_token', '__weakref__' + _token: object + + +class _NoValueSet(enum.Enum): + NO_VALUE_SET = enum.auto() + + +class RunvarToken(Generic[T]): + __slots__ = '_var', '_value', '_redeemed' + + def __init__(self, var: 'RunVar[T]', value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]]): + self._var = var + self._value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = value + self._redeemed = False + + +class RunVar(Generic[T]): + """Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop.""" + __slots__ = '_name', '_default' + + NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET + + _token_wrappers: Set[_TokenWrapper] = set() + + def __init__(self, name: str, + default: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET): + self._name = name + self._default = default + + @property + def _current_vars(self) -> Dict[str, T]: + token = current_token() + while True: + try: + return _run_vars[token] + except TypeError: + # Happens when token isn't weak referable (TrioToken). + # This workaround does mean that some memory will leak on Trio until the problem + # is fixed on their end. + token = _TokenWrapper(token) + self._token_wrappers.add(token) + except KeyError: + run_vars = _run_vars[token] = {} + return run_vars + + @overload + def get(self, default: D) -> Union[T, D]: ... + + @overload + def get(self) -> T: ...
+ + def get( + self, default: Union[D, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET + ) -> Union[T, D]: + try: + return self._current_vars[self._name] + except KeyError: + if default is not RunVar.NO_VALUE_SET: + return default + elif self._default is not RunVar.NO_VALUE_SET: + return self._default + + raise LookupError(f'Run variable "{self._name}" has no value and no default set') + + def set(self, value: T) -> RunvarToken[T]: + current_vars = self._current_vars + token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET)) + current_vars[self._name] = value + return token + + def reset(self, token: RunvarToken[T]) -> None: + if token._var is not self: + raise ValueError('This token does not belong to this RunVar') + + if token._redeemed: + raise ValueError('This token has already been used') + + if token._value is _NoValueSet.NO_VALUE_SET: + try: + del self._current_vars[self._name] + except KeyError: + pass + else: + self._current_vars[self._name] = token._value + + token._redeemed = True + + def __repr__(self) -> str: + return f'<RunVar name={self._name!r}>' diff --git a/venv/lib/python3.10/site-packages/anyio/py.typed b/venv/lib/python3.10/site-packages/anyio/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/anyio/pytest_plugin.py b/venv/lib/python3.10/site-packages/anyio/pytest_plugin.py new file mode 100644 index 0000000..d0cc2fb --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/pytest_plugin.py @@ -0,0 +1,152 @@ +from contextlib import contextmanager +from inspect import isasyncgenfunction, iscoroutinefunction +from typing import TYPE_CHECKING, Any, Dict, Iterator, Optional, Tuple, cast + +import pytest +import sniffio + +from ._core._eventloop import get_all_backends, get_asynclib +from .abc import TestRunner + +if TYPE_CHECKING: + from _pytest.config import Config + +_current_runner: Optional[TestRunner] = None + + +def extract_backend_and_options(backend: object) -> Tuple[str, Dict[str, Any]]: + if isinstance(backend, str): + return backend, {} + elif isinstance(backend, tuple) and len(backend) == 2: + if isinstance(backend[0], str) and isinstance(backend[1], dict): + return cast(Tuple[str, Dict[str, Any]], backend) + + raise TypeError('anyio_backend must be either a string or tuple of (string, dict)') + + +@contextmanager +def get_runner(backend_name: str, backend_options: Dict[str, Any]) -> Iterator[TestRunner]: + global _current_runner + if _current_runner: + yield _current_runner + return + + asynclib = get_asynclib(backend_name) + token = None + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the async library + token = sniffio.current_async_library_cvar.set(backend_name) + + try: + backend_options = backend_options or {} + with asynclib.TestRunner(**backend_options) as runner: + _current_runner = runner + yield runner + finally: + _current_runner = None + if token: + sniffio.current_async_library_cvar.reset(token) + + +def pytest_configure(config: "Config") -> None: + config.addinivalue_line('markers', 'anyio: mark the (coroutine function) test to be run ' + 'asynchronously via anyio.') + + +def pytest_fixture_setup(fixturedef: Any, request: Any) -> None: + def wrapper(*args, anyio_backend, **kwargs): # type: ignore[no-untyped-def] + backend_name, backend_options = extract_backend_and_options(anyio_backend) + if has_backend_arg: + kwargs['anyio_backend'] = anyio_backend + + with get_runner(backend_name, backend_options) as runner: + if
isasyncgenfunction(func): + gen = func(*args, **kwargs) + try: + value = runner.call(gen.asend, None) + except StopAsyncIteration: + raise RuntimeError('Async generator did not yield') + + yield value + + try: + runner.call(gen.asend, None) + except StopAsyncIteration: + pass + else: + runner.call(gen.aclose) + raise RuntimeError('Async generator fixture did not stop') + else: + yield runner.call(func, *args, **kwargs) + + # Only apply this to coroutine functions and async generator functions in requests that involve + # the anyio_backend fixture + func = fixturedef.func + if isasyncgenfunction(func) or iscoroutinefunction(func): + if 'anyio_backend' in request.fixturenames: + has_backend_arg = 'anyio_backend' in fixturedef.argnames + fixturedef.func = wrapper + if not has_backend_arg: + fixturedef.argnames += ('anyio_backend',) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: + if collector.istestfunction(obj, name): + inner_func = obj.hypothesis.inner_test if hasattr(obj, 'hypothesis') else obj + if iscoroutinefunction(inner_func): + marker = collector.get_closest_marker('anyio') + own_markers = getattr(obj, 'pytestmark', ()) + if marker or any(marker.name == 'anyio' for marker in own_markers): + pytest.mark.usefixtures('anyio_backend')(obj) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pyfunc_call(pyfuncitem: Any) -> Optional[bool]: + def run_with_hypothesis(**kwargs: Any) -> None: + with get_runner(backend_name, backend_options) as runner: + runner.call(original_func, **kwargs) + + backend = pyfuncitem.funcargs.get('anyio_backend') + if backend: + backend_name, backend_options = extract_backend_and_options(backend) + + if hasattr(pyfuncitem.obj, 'hypothesis'): + # Wrap the inner test function unless it's already wrapped + original_func = pyfuncitem.obj.hypothesis.inner_test + if original_func.__qualname__ != run_with_hypothesis.__qualname__: + if iscoroutinefunction(original_func): + pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis + + return None + + if iscoroutinefunction(pyfuncitem.obj): + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + with get_runner(backend_name, backend_options) as runner: + runner.call(pyfuncitem.obj, **testargs) + + return True + + return None + + +@pytest.fixture(params=get_all_backends()) +def anyio_backend(request: Any) -> Any: + return request.param + + +@pytest.fixture +def anyio_backend_name(anyio_backend: Any) -> str: + if isinstance(anyio_backend, str): + return anyio_backend + else: + return anyio_backend[0] + + +@pytest.fixture +def anyio_backend_options(anyio_backend: Any) -> Dict[str, Any]: + if isinstance(anyio_backend, str): + return {} + else: + return anyio_backend[1] diff --git a/venv/lib/python3.10/site-packages/anyio/streams/__init__.py b/venv/lib/python3.10/site-packages/anyio/streams/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..c799386 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/buffered.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/buffered.cpython-310.pyc new file mode 100644 index 
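# Illustrative sketch (not part of the diff): what the plugin above enables
# in user test code -- an async test run once per backend, parametrized by
# the anyio_backend fixture defined at the bottom of the plugin.
import anyio
import pytest

@pytest.mark.anyio
async def test_checkpoint() -> None:
    await anyio.sleep(0)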
0000000..c60b018 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/buffered.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/file.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/file.cpython-310.pyc new file mode 100644 index 0000000..8efdcba Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/file.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/memory.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/memory.cpython-310.pyc new file mode 100644 index 0000000..a506c87 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/memory.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/stapled.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/stapled.cpython-310.pyc new file mode 100644 index 0000000..671d205 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/stapled.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/text.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/text.cpython-310.pyc new file mode 100644 index 0000000..78debf6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/text.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/tls.cpython-310.pyc b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/tls.cpython-310.pyc new file mode 100644 index 0000000..8b7654b Binary files /dev/null and b/venv/lib/python3.10/site-packages/anyio/streams/__pycache__/tls.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/anyio/streams/buffered.py b/venv/lib/python3.10/site-packages/anyio/streams/buffered.py new file mode 100644 index 0000000..ee220ca --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/streams/buffered.py @@ -0,0 +1,116 @@ +from dataclasses import dataclass, field +from typing import Any, Callable, Mapping + +from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import AnyByteReceiveStream, ByteReceiveStream + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated receiving + capabilities in the form of a byte stream. 
+ """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes we get from + # the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given amount of bytes from the stream. + + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + amount of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read. 
+ + :param delimiter: the marker to look for in the stream + :param max_bytes: maximum number of bytes that will be read before raising + :exc:`~anyio.DelimiterNotFound` + :return: the bytes read (not including the delimiter) + :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter + was found + :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the + bytes read up to the maximum allowed + + """ + delimiter_size = len(delimiter) + offset = 0 + while True: + # Check if the delimiter can be found in the current buffer + index = self._buffer.find(delimiter, offset) + if index >= 0: + found = self._buffer[:index] + del self._buffer[:index + len(delimiter):] + return bytes(found) + + # Check if the buffer is already at or over the limit + if len(self._buffer) >= max_bytes: + raise DelimiterNotFound(max_bytes) + + # Read more data into the buffer from the socket + try: + data = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + # Move the offset forward and add the new data to the buffer + offset = max(len(self._buffer) - delimiter_size + 1, 0) + self._buffer.extend(data) diff --git a/venv/lib/python3.10/site-packages/anyio/streams/file.py b/venv/lib/python3.10/site-packages/anyio/streams/file.py new file mode 100644 index 0000000..9dc0739 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/streams/file.py @@ -0,0 +1,139 @@ +from io import SEEK_SET, UnsupportedOperation +from os import PathLike +from pathlib import Path +from typing import Any, BinaryIO, Callable, Dict, Mapping, Union, cast + +from .. import ( + BrokenResourceError, ClosedResourceError, EndOfStream, TypedAttributeSet, to_thread, + typed_attribute) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: BinaryIO = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: BinaryIO): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: Dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, 'name'): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: Union[str, 'PathLike[str]']) -> 'FileReadStream': + """ + Create a file read stream by opening the given file. 
+ + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, 'rb') + return cls(cast(BinaryIO, file)) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. + + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: Union[str, 'PathLike[str]'], + append: bool = False) -> 'FileWriteStream': + """ + Create a file write stream by opening the given file for writing. + + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any existing file + at the given path will be truncated + + """ + mode = 'ab' if append else 'wb' + file = await to_thread.run_sync(Path(path).open, mode) + return cls(cast(BinaryIO, file)) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/venv/lib/python3.10/site-packages/anyio/streams/memory.py b/venv/lib/python3.10/site-packages/anyio/streams/memory.py new file mode 100644 index 0000000..259b7dd --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/streams/memory.py @@ -0,0 +1,256 @@ +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Deque, Generic, List, NamedTuple, Optional, Type, TypeVar + +from .. 
import ( + BrokenResourceError, ClosedResourceError, EndOfStream, WouldBlock, get_cancelled_exc_class) +from .._core._compat import DeprecatedAwaitable +from ..abc import Event, ObjectReceiveStream, ObjectSendStream +from ..lowlevel import checkpoint + +T_Item = TypeVar('T_Item') + + +class MemoryObjectStreamStatistics(NamedTuple): + current_buffer_used: int #: number of items stored in the buffer + #: maximum number of items that can be stored on this stream (or :data:`math.inf`) + max_buffer_size: float + open_send_streams: int #: number of unclosed clones of the send stream + open_receive_streams: int #: number of unclosed clones of the receive stream + tasks_waiting_send: int #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` + tasks_waiting_receive: int + + +@dataclass(eq=False) +class MemoryObjectStreamState(Generic[T_Item]): + max_buffer_size: float = field() + buffer: Deque[T_Item] = field(init=False, default_factory=deque) + open_send_channels: int = field(init=False, default=0) + open_receive_channels: int = field(init=False, default=0) + waiting_receivers: 'OrderedDict[Event, List[T_Item]]' = field(init=False, + default_factory=OrderedDict) + waiting_senders: 'OrderedDict[Event, T_Item]' = field(init=False, default_factory=OrderedDict) + + def statistics(self) -> MemoryObjectStreamStatistics: + return MemoryObjectStreamStatistics( + len(self.buffer), self.max_buffer_size, self.open_send_channels, + self.open_receive_channels, len(self.waiting_senders), len(self.waiting_receivers)) + + +@dataclass(eq=False) +class MemoryObjectReceiveStream(Generic[T_Item], ObjectReceiveStream[T_Item]): + _state: MemoryObjectStreamState[T_Item] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_receive_channels += 1 + + def receive_nowait(self) -> T_Item: + """ + Receive the next item if it can be done without waiting. + + :return: the received item + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been + closed from the sending end + :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks + waiting to send + + """ + if self._closed: + raise ClosedResourceError + + if self._state.waiting_senders: + # Get the item from the next sender + send_event, item = self._state.waiting_senders.popitem(last=False) + self._state.buffer.append(item) + send_event.set() + + if self._state.buffer: + return self._state.buffer.popleft() + elif not self._state.open_send_channels: + raise EndOfStream + + raise WouldBlock + + async def receive(self) -> T_Item: + await checkpoint() + try: + return self.receive_nowait() + except WouldBlock: + # Add ourselves in the queue + receive_event = Event() + container: List[T_Item] = [] + self._state.waiting_receivers[receive_event] = container + + try: + await receive_event.wait() + except get_cancelled_exc_class(): + # Ignore the immediate cancellation if we already received an item, so as not to + # lose it + if not container: + raise + finally: + self._state.waiting_receivers.pop(receive_event, None) + + if container: + return container[0] + else: + raise EndOfStream + + def clone(self) -> 'MemoryObjectReceiveStream[T_Item]': + """ + Create a clone of this receive stream. + + Each clone can be closed separately. 
Only when all clones have been closed will the + receiving end of the memory stream be considered closed by the sending ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectReceiveStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special case for the + benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_receive_channels -= 1 + if self._state.open_receive_channels == 0: + send_events = list(self._state.waiting_senders.keys()) + for event in send_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> 'MemoryObjectReceiveStream[T_Item]': + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.close() + + +@dataclass(eq=False) +class MemoryObjectSendStream(Generic[T_Item], ObjectSendStream[T_Item]): + _state: MemoryObjectStreamState[T_Item] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_send_channels += 1 + + def send_nowait(self, item: T_Item) -> DeprecatedAwaitable: + """ + Send an item immediately if it can be done without waiting. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting + to receive + + """ + if self._closed: + raise ClosedResourceError + if not self._state.open_receive_channels: + raise BrokenResourceError + + if self._state.waiting_receivers: + receive_event, container = self._state.waiting_receivers.popitem(last=False) + container.append(item) + receive_event.set() + elif len(self._state.buffer) < self._state.max_buffer_size: + self._state.buffer.append(item) + else: + raise WouldBlock + + return DeprecatedAwaitable(self.send_nowait) + + async def send(self, item: T_Item) -> None: + await checkpoint() + try: + self.send_nowait(item) + except WouldBlock: + # Wait until there's someone on the receiving end + send_event = Event() + self._state.waiting_senders[send_event] = item + try: + await send_event.wait() + except BaseException: + self._state.waiting_senders.pop(send_event, None) # type: ignore[arg-type] + raise + + if self._state.waiting_senders.pop(send_event, None): # type: ignore[arg-type] + raise BrokenResourceError + + def clone(self) -> 'MemoryObjectSendStream[T_Item]': + """ + Create a clone of this send stream. + + Each clone can be closed separately. Only when all clones have been closed will the + sending end of the memory stream be considered closed by the receiving ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectSendStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special case for the + benefit of synchronous callbacks. 
+ + """ + if not self._closed: + self._closed = True + self._state.open_send_channels -= 1 + if self._state.open_send_channels == 0: + receive_events = list(self._state.waiting_receivers.keys()) + self._state.waiting_receivers.clear() + for event in receive_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> 'MemoryObjectSendStream[T_Item]': + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.close() diff --git a/venv/lib/python3.10/site-packages/anyio/streams/stapled.py b/venv/lib/python3.10/site-packages/anyio/streams/stapled.py new file mode 100644 index 0000000..0d5e7fb --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/streams/stapled.py @@ -0,0 +1,124 @@ +from dataclasses import dataclass +from typing import Any, Callable, Generic, List, Mapping, Optional, Sequence, TypeVar + +from ..abc import ( + ByteReceiveStream, ByteSendStream, ByteStream, Listener, ObjectReceiveStream, ObjectSendStream, + ObjectStream, TaskGroup) + +T_Item = TypeVar('T_Item') +T_Stream = TypeVar('T_Stream') + + +@dataclass(eq=False) +class StapledByteStream(ByteStream): + """ + Combines two byte streams into a single, bidirectional byte stream. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. + + :param ByteSendStream send_stream: the sending byte stream + :param ByteReceiveStream receive_stream: the receiving byte stream + """ + + send_stream: ByteSendStream + receive_stream: ByteReceiveStream + + async def receive(self, max_bytes: int = 65536) -> bytes: + return await self.receive_stream.receive(max_bytes) + + async def send(self, item: bytes) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return {**self.send_stream.extra_attributes, **self.receive_stream.extra_attributes} + + +@dataclass(eq=False) +class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): + """ + Combines two object streams into a single, bidirectional object stream. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. 
+ + :param ObjectSendStream send_stream: the sending object stream + :param ObjectReceiveStream receive_stream: the receiving object stream + """ + + send_stream: ObjectSendStream[T_Item] + receive_stream: ObjectReceiveStream[T_Item] + + async def receive(self) -> T_Item: + return await self.receive_stream.receive() + + async def send(self, item: T_Item) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return {**self.send_stream.extra_attributes, **self.receive_stream.extra_attributes} + + +@dataclass(eq=False) +class MultiListener(Generic[T_Stream], Listener[T_Stream]): + """ + Combines multiple listeners into one, serving connections from all of them at once. + + Any MultiListeners in the given collection of listeners will have their listeners moved into + this one. + + Extra attributes are provided from each listener, with each successive listener overriding any + conflicting attributes from the previous one. + + :param listeners: listeners to serve + :type listeners: Sequence[Listener[T_Stream]] + """ + + listeners: Sequence[Listener[T_Stream]] + + def __post_init__(self) -> None: + listeners: List[Listener[T_Stream]] = [] + for listener in self.listeners: + if isinstance(listener, MultiListener): + listeners.extend(listener.listeners) + del listener.listeners[:] # type: ignore[attr-defined] + else: + listeners.append(listener) + + self.listeners = listeners + + async def serve(self, handler: Callable[[T_Stream], Any], + task_group: Optional[TaskGroup] = None) -> None: + from .. import create_task_group + + async with create_task_group() as tg: + for listener in self.listeners: + tg.start_soon(listener.serve, handler, task_group) + + async def aclose(self) -> None: + for listener in self.listeners: + await listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict = {} + for listener in self.listeners: + attributes.update(listener.extra_attributes) + + return attributes diff --git a/venv/lib/python3.10/site-packages/anyio/streams/text.py b/venv/lib/python3.10/site-packages/anyio/streams/text.py new file mode 100644 index 0000000..d352b5b --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/streams/text.py @@ -0,0 +1,130 @@ +import codecs +from dataclasses import InitVar, dataclass, field +from typing import Any, Callable, Mapping, Tuple + +from ..abc import ( + AnyByteReceiveStream, AnyByteSendStream, AnyByteStream, ObjectReceiveStream, ObjectSendStream, + ObjectStream) + + +@dataclass(eq=False) +class TextReceiveStream(ObjectReceiveStream[str]): + """ + Stream wrapper that decodes bytes to strings using the given encoding. + + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any completely + received unicode characters as soon as they come in. + + :param transport_stream: any bytes-based receive stream + :param encoding: character encoding to use for decoding bytes to strings (defaults to + ``utf-8``) + :param errors: handling scheme for decoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteReceiveStream + encoding: InitVar[str] = 'utf-8' + errors: InitVar[str] = 'strict' + _decoder: codecs.IncrementalDecoder = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + decoder_class = codecs.getincrementaldecoder(encoding) + self._decoder = decoder_class(errors=errors) + + async def receive(self) -> str: + while True: + chunk = await self.transport_stream.receive() + decoded = self._decoder.decode(chunk) + if decoded: + return decoded + + async def aclose(self) -> None: + await self.transport_stream.aclose() + self._decoder.reset() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextSendStream(ObjectSendStream[str]): + """ + Sends strings to the wrapped stream as bytes using the given encoding. + + :param AnyByteSendStream transport_stream: any bytes-based send stream + :param str encoding: character encoding to use for encoding strings to bytes (defaults to + ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteSendStream + encoding: InitVar[str] = 'utf-8' + errors: str = 'strict' + _encoder: Callable[..., Tuple[bytes, int]] = field(init=False) + + def __post_init__(self, encoding: str) -> None: + self._encoder = codecs.getencoder(encoding) + + async def send(self, item: str) -> None: + encoded = self._encoder(item, self.errors)[0] + await self.transport_stream.send(encoded) + + async def aclose(self) -> None: + await self.transport_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextStream(ObjectStream[str]): + """ + A bidirectional stream that decodes bytes to strings on receive and encodes strings to bytes on + send. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. + + :param AnyByteStream transport_stream: any bytes-based stream + :param str encoding: character encoding to use for encoding/decoding strings to/from bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteStream + encoding: InitVar[str] = 'utf-8' + errors: InitVar[str] = 'strict' + _receive_stream: TextReceiveStream = field(init=False) + _send_stream: TextSendStream = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + self._receive_stream = TextReceiveStream(self.transport_stream, encoding=encoding, + errors=errors) + self._send_stream = TextSendStream(self.transport_stream, encoding=encoding, errors=errors) + + async def receive(self) -> str: + return await self._receive_stream.receive() + + async def send(self, item: str) -> None: + await self._send_stream.send(item) + + async def send_eof(self) -> None: + await self.transport_stream.send_eof() + + async def aclose(self) -> None: + await self._send_stream.aclose() + await self._receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return {**self._send_stream.extra_attributes, **self._receive_stream.extra_attributes} diff --git a/venv/lib/python3.10/site-packages/anyio/streams/tls.py b/venv/lib/python3.10/site-packages/anyio/streams/tls.py new file mode 100644 index 0000000..b235426 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/streams/tls.py @@ -0,0 +1,281 @@ +import logging +import re +import ssl +from dataclasses import dataclass +from functools import wraps +from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple, TypeVar, Union + +from .. import BrokenResourceError, EndOfStream, aclose_forcefully, get_cancelled_exc_class +from .._core._typedattr import TypedAttributeSet, typed_attribute +from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup + +T_Retval = TypeVar('T_Retval') +_PCTRTT = Tuple[Tuple[str, str], ...] +_PCTRTTT = Tuple[_PCTRTT, ...] + + +class TLSAttribute(TypedAttributeSet): + """Contains Transport Layer Security related attributes.""" + #: the selected ALPN protocol + alpn_protocol: Optional[str] = typed_attribute() + #: the channel binding for type ``tls-unique`` + channel_binding_tls_unique: bytes = typed_attribute() + #: the selected cipher + cipher: Tuple[str, str, int] = typed_attribute() + #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` for more + #: information) + peer_certificate: Optional[Dict[str, Union[str, _PCTRTTT, _PCTRTT]]] = typed_attribute() + #: the peer certificate in binary form + peer_certificate_binary: Optional[bytes] = typed_attribute() + #: ``True`` if this is the server side of the connection + server_side: bool = typed_attribute() + #: ciphers shared between both ends of the TLS connection + shared_ciphers: List[Tuple[str, str, int]] = typed_attribute() + #: the :class:`~ssl.SSLObject` used for encryption + ssl_object: ssl.SSLObject = typed_attribute() + #: ``True`` if this stream does (and expects) a closing TLS handshake when the stream is being + #: closed + standard_compatible: bool = typed_attribute() + #: the TLS protocol version (e.g. ``TLSv1.2``) + tls_version: str = typed_attribute() + + +@dataclass(eq=False) +class TLSStream(ByteStream): + """ + A stream wrapper that encrypts all sent data and decrypts received data. + + This class has no public initializer; use :meth:`wrap` instead. + All extra attributes from :class:`~TLSAttribute` are supported. 
+ + :var AnyByteStream transport_stream: the wrapped stream + + """ + transport_stream: AnyByteStream + standard_compatible: bool + _ssl_object: ssl.SSLObject + _read_bio: ssl.MemoryBIO + _write_bio: ssl.MemoryBIO + + @classmethod + async def wrap(cls, transport_stream: AnyByteStream, *, server_side: Optional[bool] = None, + hostname: Optional[str] = None, ssl_context: Optional[ssl.SSLContext] = None, + standard_compatible: bool = True) -> 'TLSStream': + """ + Wrap an existing stream with Transport Layer Security. + + This performs a TLS handshake with the peer. + + :param transport_stream: a bytes-transporting stream to wrap + :param server_side: ``True`` if this is the server side of the connection, ``False`` if + this is the client side (if omitted, will be set to ``False`` if ``hostname`` has been + provided, ``False`` otherwise). Used only to create a default context when an explicit + context has not been provided. + :param hostname: host name of the peer (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure default will be + created) + :param standard_compatible: if ``False``, skip the closing handshake when closing the + connection, and don't raise an exception if the peer does the same + :raises ~ssl.SSLError: if the TLS handshake fails + + """ + if server_side is None: + server_side = not hostname + + if not ssl_context: + purpose = ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH + ssl_context = ssl.create_default_context(purpose) + + # Re-enable detection of unexpected EOFs if it was disabled by Python + if hasattr(ssl, 'OP_IGNORE_UNEXPECTED_EOF'): + ssl_context.options ^= ssl.OP_IGNORE_UNEXPECTED_EOF # type: ignore[attr-defined] + + bio_in = ssl.MemoryBIO() + bio_out = ssl.MemoryBIO() + ssl_object = ssl_context.wrap_bio(bio_in, bio_out, server_side=server_side, + server_hostname=hostname) + wrapper = cls(transport_stream=transport_stream, + standard_compatible=standard_compatible, _ssl_object=ssl_object, + _read_bio=bio_in, _write_bio=bio_out) + await wrapper._call_sslobject_method(ssl_object.do_handshake) + return wrapper + + async def _call_sslobject_method( + self, func: Callable[..., T_Retval], *args: object + ) -> T_Retval: + while True: + try: + result = func(*args) + except ssl.SSLWantReadError: + try: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + data = await self.transport_stream.receive() + except EndOfStream: + self._read_bio.write_eof() + except OSError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + else: + self._read_bio.write(data) + except ssl.SSLWantWriteError: + await self.transport_stream.send(self._write_bio.read()) + except ssl.SSLSyscallError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + except ssl.SSLError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + if (isinstance(exc, ssl.SSLEOFError) + or 'UNEXPECTED_EOF_WHILE_READING' in exc.strerror): + if self.standard_compatible: + raise BrokenResourceError from exc + else: + raise EndOfStream from None + + raise + else: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + return result + + async def unwrap(self) -> Tuple[AnyByteStream, bytes]: + """ + Does the TLS closing handshake. 
+ + :return: a tuple of (wrapped byte stream, bytes left in the read buffer) + + """ + await self._call_sslobject_method(self._ssl_object.unwrap) + self._read_bio.write_eof() + self._write_bio.write_eof() + return self.transport_stream, self._read_bio.read() + + async def aclose(self) -> None: + if self.standard_compatible: + try: + await self.unwrap() + except BaseException: + await aclose_forcefully(self.transport_stream) + raise + + await self.transport_stream.aclose() + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + await self._call_sslobject_method(self._ssl_object.write, item) + + async def send_eof(self) -> None: + tls_version = self.extra(TLSAttribute.tls_version) + match = re.match(r'TLSv(\d+)(?:\.(\d+))?', tls_version) + if match: + major, minor = int(match.group(1)), int(match.group(2) or 0) + if (major, minor) < (1, 3): + raise NotImplementedError(f'send_eof() requires at least TLSv1.3; current ' + f'session uses {tls_version}') + + raise NotImplementedError('send_eof() has not yet been implemented for TLS streams') + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.transport_stream.extra_attributes, + TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, + TLSAttribute.channel_binding_tls_unique: self._ssl_object.get_channel_binding, + TLSAttribute.cipher: self._ssl_object.cipher, + TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), + TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert(True), + TLSAttribute.server_side: lambda: self._ssl_object.server_side, + TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers(), + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + TLSAttribute.ssl_object: lambda: self._ssl_object, + TLSAttribute.tls_version: self._ssl_object.version + } + + +@dataclass(eq=False) +class TLSListener(Listener[TLSStream]): + """ + A convenience listener that wraps another listener and auto-negotiates a TLS session on every + accepted connection. + + If the TLS handshake times out or raises an exception, :meth:`handle_handshake_error` is + called to do whatever post-mortem processing is deemed necessary. + + Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. + + :param Listener listener: the listener to wrap + :param ssl_context: the SSL context object + :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` + :param handshake_timeout: time limit for the TLS handshake + (passed to :func:`~anyio.fail_after`) + """ + + listener: Listener[Any] + ssl_context: ssl.SSLContext + standard_compatible: bool = True + handshake_timeout: float = 30 + + @staticmethod + async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: + f""" + Handle an exception raised during the TLS handshake. + + This method does 3 things: + + #. Forcefully closes the original stream + #. Logs the exception (unless it was a cancellation exception) using the ``{__name__}`` + logger + #. 
Reraises the exception if it was a base exception or a cancellation exception + + :param exc: the exception + :param stream: the original stream + + """ + await aclose_forcefully(stream) + + # Log all except cancellation exceptions + if not isinstance(exc, get_cancelled_exc_class()): + logging.getLogger(__name__).exception('Error during TLS handshake') + + # Only reraise base exceptions and cancellation exceptions + if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): + raise + + async def serve(self, handler: Callable[[TLSStream], Any], + task_group: Optional[TaskGroup] = None) -> None: + @wraps(handler) + async def handler_wrapper(stream: AnyByteStream) -> None: + from .. import fail_after + try: + with fail_after(self.handshake_timeout): + wrapped_stream = await TLSStream.wrap( + stream, ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible) + except BaseException as exc: + await self.handle_handshake_error(exc, stream) + else: + await handler(wrapped_stream) + + await self.listener.serve(handler_wrapper, task_group) + + async def aclose(self) -> None: + await self.listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + } diff --git a/venv/lib/python3.10/site-packages/anyio/to_process.py b/venv/lib/python3.10/site-packages/anyio/to_process.py new file mode 100644 index 0000000..463b21c --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/to_process.py @@ -0,0 +1,229 @@ +import os +import pickle +import subprocess +import sys +from collections import deque +from importlib.util import module_from_spec, spec_from_file_location +from typing import Callable, Deque, List, Optional, Set, Tuple, TypeVar, cast + +from ._core._eventloop import current_time, get_asynclib, get_cancelled_exc_class +from ._core._exceptions import BrokenWorkerProcess +from ._core._subprocesses import open_process +from ._core._synchronization import CapacityLimiter +from ._core._tasks import CancelScope, fail_after +from .abc import ByteReceiveStream, ByteSendStream, Process +from .lowlevel import RunVar, checkpoint_if_cancelled +from .streams.buffered import BufferedByteReceiveStream + +WORKER_MAX_IDLE_TIME = 300 # 5 minutes + +T_Retval = TypeVar('T_Retval') +_process_pool_workers: RunVar[Set[Process]] = RunVar('_process_pool_workers') +_process_pool_idle_workers: RunVar[Deque[Tuple[Process, float]]] = RunVar( + '_process_pool_idle_workers') +_default_process_limiter: RunVar[CapacityLimiter] = RunVar('_default_process_limiter') + + +async def run_sync( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None) -> T_Retval: + """ + Call the given function with the given arguments in a worker process. + + If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, + the worker process running it will be abruptly terminated using SIGKILL (or + ``terminateProcess()`` on Windows). + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation while it's running + :param limiter: capacity limiter to use to limit the total amount of processes running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. 
+ + """ + async def send_raw_command(pickled_cmd: bytes) -> object: + try: + await stdin.send(pickled_cmd) + response = await buffered.receive_until(b'\n', 50) + status, length = response.split(b' ') + if status not in (b'RETURN', b'EXCEPTION'): + raise RuntimeError(f'Worker process returned unexpected response: {response!r}') + + pickled_response = await buffered.receive_exactly(int(length)) + except BaseException as exc: + workers.discard(process) + try: + process.kill() + with CancelScope(shield=True): + await process.aclose() + except ProcessLookupError: + pass + + if isinstance(exc, get_cancelled_exc_class()): + raise + else: + raise BrokenWorkerProcess from exc + + retval = pickle.loads(pickled_response) + if status == b'EXCEPTION': + assert isinstance(retval, BaseException) + raise retval + else: + return retval + + # First pickle the request before trying to reserve a worker process + await checkpoint_if_cancelled() + request = pickle.dumps(('run', func, args), protocol=pickle.HIGHEST_PROTOCOL) + + # If this is the first run in this event loop thread, set up the necessary variables + try: + workers = _process_pool_workers.get() + idle_workers = _process_pool_idle_workers.get() + except LookupError: + workers = set() + idle_workers = deque() + _process_pool_workers.set(workers) + _process_pool_idle_workers.set(idle_workers) + get_asynclib().setup_process_pool_exit_at_shutdown(workers) + + async with (limiter or current_default_process_limiter()): + # Pop processes from the pool (starting from the most recently used) until we find one that + # hasn't exited yet + process: Process + while idle_workers: + process, idle_since = idle_workers.pop() + if process.returncode is None: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream(cast(ByteReceiveStream, process.stdout)) + + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME seconds or + # longer + now = current_time() + killed_processes: List[Process] = [] + while idle_workers: + if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: + break + + process, idle_since = idle_workers.popleft() + process.kill() + workers.remove(process) + killed_processes.append(process) + + with CancelScope(shield=True): + for process in killed_processes: + await process.aclose() + + break + + workers.remove(process) + else: + command = [sys.executable, '-u', '-m', __name__] + process = await open_process(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE) + try: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream(cast(ByteReceiveStream, process.stdout)) + with fail_after(20): + message = await buffered.receive(6) + + if message != b'READY\n': + raise BrokenWorkerProcess( + f'Worker process returned unexpected response: {message!r}') + + main_module_path = getattr(sys.modules['__main__'], '__file__', None) + pickled = pickle.dumps(('init', sys.path, main_module_path), + protocol=pickle.HIGHEST_PROTOCOL) + await send_raw_command(pickled) + except (BrokenWorkerProcess, get_cancelled_exc_class()): + raise + except BaseException as exc: + process.kill() + raise BrokenWorkerProcess('Error during worker process initialization') from exc + + workers.add(process) + + with CancelScope(shield=not cancellable): + try: + return cast(T_Retval, await send_raw_command(request)) + finally: + if process in workers: + idle_workers.append((process, current_time())) + + +def current_default_process_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to 
limit the number of worker processes. + + :return: a capacity limiter object + + """ + try: + return _default_process_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or 2) + _default_process_limiter.set(limiter) + return limiter + + +def process_worker() -> None: + # Redirect standard streams to os.devnull so that user code won't interfere with the + # parent-worker communication + stdin = sys.stdin + stdout = sys.stdout + sys.stdin = open(os.devnull) + sys.stdout = open(os.devnull, 'w') + + stdout.buffer.write(b'READY\n') + while True: + retval = exception = None + try: + command, *args = pickle.load(stdin.buffer) + except EOFError: + return + except BaseException as exc: + exception = exc + else: + if command == 'run': + func, args = args + try: + retval = func(*args) + except BaseException as exc: + exception = exc + elif command == 'init': + main_module_path: Optional[str] + sys.path, main_module_path = args + del sys.modules['__main__'] + if main_module_path: + # Load the parent's main module but as __mp_main__ instead of __main__ + # (like multiprocessing does) to avoid infinite recursion + try: + spec = spec_from_file_location('__mp_main__', main_module_path) + if spec and spec.loader: + main = module_from_spec(spec) + spec.loader.exec_module(main) + sys.modules['__main__'] = main + except BaseException as exc: + exception = exc + + try: + if exception is not None: + status = b'EXCEPTION' + pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) + else: + status = b'RETURN' + pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) + except BaseException as exc: + exception = exc + status = b'EXCEPTION' + pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) + + stdout.buffer.write(b'%s %d\n' % (status, len(pickled))) + stdout.buffer.write(pickled) + + # Respect SIGTERM + if isinstance(exception, SystemExit): + raise exception + + +if __name__ == '__main__': + process_worker() diff --git a/venv/lib/python3.10/site-packages/anyio/to_thread.py b/venv/lib/python3.10/site-packages/anyio/to_thread.py new file mode 100644 index 0000000..5fc9589 --- /dev/null +++ b/venv/lib/python3.10/site-packages/anyio/to_thread.py @@ -0,0 +1,54 @@ +from typing import Callable, Optional, TypeVar +from warnings import warn + +from ._core._eventloop import get_asynclib +from .abc import CapacityLimiter + +T_Retval = TypeVar('T_Retval') + + +async def run_sync( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None) -> T_Retval: + """ + Call the given function with the given arguments in a worker thread. + + If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, + the thread will still run its course but its return value (or any raised exception) will be + ignored. + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation + :param limiter: capacity limiter to use to limit the total amount of threads running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. 
+ + """ + return await get_asynclib().run_sync_in_worker_thread(func, *args, cancellable=cancellable, + limiter=limiter) + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None) -> T_Retval: + warn('run_sync_in_worker_thread() has been deprecated, use anyio.to_thread.run_sync() instead', + DeprecationWarning) + return await run_sync(func, *args, cancellable=cancellable, limiter=limiter) + + +def current_default_thread_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of concurrent threads. + + :return: a capacity limiter object + + """ + return get_asynclib().current_default_thread_limiter() + + +def current_default_worker_thread_limiter() -> CapacityLimiter: + warn('current_default_worker_thread_limiter() has been deprecated, ' + 'use anyio.to_thread.current_default_thread_limiter() instead', + DeprecationWarning) + return current_default_thread_limiter() diff --git a/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/LICENSE b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/LICENSE new file mode 100644 index 0000000..5f4f225 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) Django Software Foundation and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of Django nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
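The ``anyio.to_thread.run_sync()`` and ``anyio.to_process.run_sync()`` helpers vendored just above share a signature but differ in where the work runs. A minimal usage sketch based only on the signatures shown there; the helper functions, the file path, and the limit of 4 are illustrative, not from the vendored code::

    import anyio
    import anyio.to_process
    import anyio.to_thread


    def blocking_read(path: str) -> str:
        # Stand-in for any blocking call; safe to run in a worker thread
        with open(path) as f:
            return f.read()


    def cpu_bound(n: int) -> int:
        # Stand-in for CPU-heavy work; defined at module level so the
        # worker process can unpickle it
        return sum(i * i for i in range(n))


    async def main() -> None:
        # Worker thread: concurrency is capped by the default limiter
        # returned by current_default_thread_limiter()
        text = await anyio.to_thread.run_sync(blocking_read, '/etc/hostname')

        # Worker process: an explicit CapacityLimiter bounds how many
        # pool processes run at once
        limiter = anyio.CapacityLimiter(4)
        total = await anyio.to_process.run_sync(cpu_bound, 1_000_000, limiter=limiter)
        print(text.strip(), total)


    anyio.run(main)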
diff --git a/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/METADATA new file mode 100644 index 0000000..30b9442 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/METADATA @@ -0,0 +1,248 @@ +Metadata-Version: 2.1 +Name: asgiref +Version: 3.5.1 +Summary: ASGI specs, helper code, and adapters +Home-page: https://github.com/django/asgiref/ +Author: Django Software Foundation +Author-email: foundation@djangoproject.com +License: BSD +Project-URL: Documentation, https://asgi.readthedocs.io/ +Project-URL: Further Documentation, https://docs.djangoproject.com/en/stable/topics/async/#async-adapter-functions +Project-URL: Changelog, https://github.com/django/asgiref/blob/master/CHANGELOG.txt +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: typing-extensions ; python_version < "3.8" +Provides-Extra: tests +Requires-Dist: pytest ; extra == 'tests' +Requires-Dist: pytest-asyncio ; extra == 'tests' +Requires-Dist: mypy (>=0.800) ; extra == 'tests' + +asgiref +======= + +.. image:: https://api.travis-ci.org/django/asgiref.svg + :target: https://travis-ci.org/django/asgiref + +.. image:: https://img.shields.io/pypi/v/asgiref.svg + :target: https://pypi.python.org/pypi/asgiref + +ASGI is a standard for Python asynchronous web apps and servers to communicate +with each other, and positioned as an asynchronous successor to WSGI. You can +read more at https://asgi.readthedocs.io/en/latest/ + +This package includes ASGI base libraries, such as: + +* Sync-to-async and async-to-sync function wrappers, ``asgiref.sync`` +* Server base classes, ``asgiref.server`` +* A WSGI-to-ASGI adapter, in ``asgiref.wsgi`` + + +Function wrappers +----------------- + +These allow you to wrap or decorate async or sync functions to call them from +the other style (so you can call async functions from a synchronous thread, +or vice-versa). + +In particular: + +* AsyncToSync lets a synchronous subthread stop and wait while the async + function is called on the main thread's event loop, and then control is + returned to the thread when the async function is finished. + +* SyncToAsync lets async code call a synchronous function, which is run in + a threadpool and control returned to the async coroutine when the synchronous + function completes. + +The idea is to make it easier to call synchronous APIs from async code and +asynchronous APIs from synchronous code so it's easier to transition code from +one style to the other. In the case of Channels, we wrap the (synchronous) +Django view system with SyncToAsync to allow it to run inside the (asynchronous) +ASGI server. + +Note that exactly what threads things run in is very specific, and aimed to +keep maximum compatibility with old synchronous code. See +"Synchronous code & Threads" below for a full explanation. 
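As a minimal sketch of both wrappers in use (the helper functions here are illustrative, not part of asgiref)::

    from asgiref.sync import async_to_sync, sync_to_async


    def read_config() -> dict:
        # Stand-in for blocking work such as file or database I/O
        return {"debug": True}


    async def fetch_status() -> str:
        # Stand-in for a coroutine that awaits a network call
        return "ok"


    async def async_view() -> None:
        # Sync -> async: run read_config in a threadpool and await the result;
        # thread_sensitive=True keeps it on the shared synchronous thread
        config = await sync_to_async(read_config, thread_sensitive=True)()
        print(config)


    def sync_entry_point() -> None:
        # Async -> sync: drive the coroutine on an event loop and block
        # until it finishes
        status = async_to_sync(fetch_status)()
        print(status)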
By default, +``sync_to_async`` will run all synchronous code in the program in the same +thread for safety reasons; you can disable this for more performance with +``@sync_to_async(thread_sensitive=False)``, but make sure that your code does +not rely on anything bound to threads (like database connections) when you do. + + +Threadlocal replacement +----------------------- + +This is a drop-in replacement for ``threading.local`` that works with both +threads and asyncio Tasks. Even better, it will proxy values through from a +task-local context to a thread-local context when you use ``sync_to_async`` +to run things in a threadpool, and vice-versa for ``async_to_sync``. + +If you instead want true thread- and task-safety, you can set +``thread_critical`` on the Local object to ensure this instead. + + +Server base classes +------------------- + +Includes a ``StatelessServer`` class which provides all the hard work of +writing a stateless server (as in, does not handle direct incoming sockets +but instead consumes external streams or sockets to work out what is happening). + +An example of such a server would be a chatbot server that connects out to +a central chat server and provides a "connection scope" per user chatting to +it. There's only one actual connection, but the server has to separate things +into several scopes for easier writing of the code. + +You can see an example of this being used in `frequensgi `_. + + +WSGI-to-ASGI adapter +-------------------- + +Allows you to wrap a WSGI application so it appears as a valid ASGI application. + +Simply wrap it around your WSGI application like so:: + + asgi_application = WsgiToAsgi(wsgi_application) + +The WSGI application will be run in a synchronous threadpool, and the wrapped +ASGI application will be one that accepts ``http`` class messages. + +Please note that not all extended features of WSGI may be supported (such as +file handles for incoming POST bodies). + + +Dependencies +------------ + +``asgiref`` requires Python 3.7 or higher. + + +Contributing +------------ + +Please refer to the +`main Channels contributing docs `_. + + +Testing +''''''' + +To run tests, make sure you have installed the ``tests`` extra with the package:: + + cd asgiref/ + pip install -e .[tests] + pytest + + +Building the documentation +'''''''''''''''''''''''''' + +The documentation uses `Sphinx `_:: + + cd asgiref/docs/ + pip install sphinx + +To build the docs, you can use the default tools:: + + sphinx-build -b html . _build/html # or `make html`, if you've got make set up + cd _build/html + python -m http.server + +...or you can use ``sphinx-autobuild`` to run a server and rebuild/reload +your documentation changes automatically:: + + pip install sphinx-autobuild + sphinx-autobuild . _build/html + + +Releasing +''''''''' + +To release, first add details to CHANGELOG.txt and update the version number in ``asgiref/__init__.py``. + +Then, build and push the packages:: + + python -m build + twine upload dist/* + rm -r build/ dist/ + + +Implementation Details +---------------------- + +Synchronous code & threads +'''''''''''''''''''''''''' + +The ``asgiref.sync`` module provides two wrappers that let you go between +asynchronous and synchronous code at will, while taking care of the rough edges +for you. + +Unfortunately, the rough edges are numerous, and the code has to work especially +hard to keep things in the same thread as much as possible. 
Notably, the +restrictions we are working with are: + +* All synchronous code called through ``SyncToAsync`` and marked with + ``thread_sensitive`` should run in the same thread as each other (and if the + outer layer of the program is synchronous, the main thread) + +* If a thread already has a running async loop, ``AsyncToSync`` can't run things + on that loop if it's blocked on synchronous code that is above you in the + call stack. + +The first compromise you get to might be that ``thread_sensitive`` code should +just run in the same thread and not spawn in a sub-thread, fulfilling the first +restriction, but that immediately runs you into the second restriction. + +The only real solution is to essentially have a variant of ThreadPoolExecutor +that executes any ``thread_sensitive`` code on the outermost synchronous +thread - either the main thread, or a single spawned subthread. + +This means you now have two basic states: + +* If the outermost layer of your program is synchronous, then all async code + run through ``AsyncToSync`` will run in a per-call event loop in arbitrary + sub-threads, while all ``thread_sensitive`` code will run in the main thread. + +* If the outermost layer of your program is asynchronous, then all async code + runs on the main thread's event loop, and all ``thread_sensitive`` synchronous + code will run in a single shared sub-thread. + +Crucially, this means that in both cases there is a thread which is a shared +resource that all ``thread_sensitive`` code must run on, and there is a chance +that this thread is currently blocked on its own ``AsyncToSync`` call. Thus, +``AsyncToSync`` needs to act as an executor for thread code while it's blocking. + +The ``CurrentThreadExecutor`` class provides this functionality; rather than +simply waiting on a Future, you can call its ``run_until_future`` method and +it will run submitted code until that Future is done. This means that code +inside the call can then run code on your thread. + + +Maintenance and Security +------------------------ + +To report security issues, please contact security@djangoproject.com. For GPG +signatures and more security process information, see +https://docs.djangoproject.com/en/dev/internals/security/. + +To report bugs or request new features, please open a new GitHub issue. + +This repository is part of the Channels project. For the shepherd and maintenance team, please see the +`main Channels readme `_. 
+ + diff --git a/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/RECORD new file mode 100644 index 0000000..9a110e8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/RECORD @@ -0,0 +1,27 @@ +asgiref-3.5.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +asgiref-3.5.1.dist-info/LICENSE,sha256=uEZBXRtRTpwd_xSiLeuQbXlLxUbKYSn5UKGM0JHipmk,1552 +asgiref-3.5.1.dist-info/METADATA,sha256=PF1mkM5ipqWUZVIIuxmTWiNFcnGmkJWNjq2G7sguLic,9163 +asgiref-3.5.1.dist-info/RECORD,, +asgiref-3.5.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +asgiref-3.5.1.dist-info/top_level.txt,sha256=bokQjCzwwERhdBiPdvYEZa4cHxT4NCeAffQNUqJ8ssg,8 +asgiref/__init__.py,sha256=q84OxOhhZQRg9QLCpRzm8ZW7aGHtGUAnNgVlGvZ9vBg,22 +asgiref/__pycache__/__init__.cpython-310.pyc,, +asgiref/__pycache__/compatibility.cpython-310.pyc,, +asgiref/__pycache__/current_thread_executor.cpython-310.pyc,, +asgiref/__pycache__/local.cpython-310.pyc,, +asgiref/__pycache__/server.cpython-310.pyc,, +asgiref/__pycache__/sync.cpython-310.pyc,, +asgiref/__pycache__/testing.cpython-310.pyc,, +asgiref/__pycache__/timeout.cpython-310.pyc,, +asgiref/__pycache__/typing.cpython-310.pyc,, +asgiref/__pycache__/wsgi.cpython-310.pyc,, +asgiref/compatibility.py,sha256=MVH2bEdiCMMVTLbE-1V6KiU7q4LwqzP7PIufeXa-njM,1598 +asgiref/current_thread_executor.py,sha256=oeH8zv2tTmcbpxdUmOSMzbEXzeY5nJzIMFvzprE95gA,2801 +asgiref/local.py,sha256=nx5RqVFLYgUJVaxzApuQUW7dd9y21sruMYdgISoRs1k,4854 +asgiref/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +asgiref/server.py,sha256=egTQhZo1k4G0F7SSBQNp_VOekpGcjBJZU2kkCoiGC_M,6005 +asgiref/sync.py,sha256=bZaSXL1m2yL9_b_fJSy9ZzXSHR0pvJ6dDqSVNvY1TRs,19920 +asgiref/testing.py,sha256=3byNRV7Oto_Fg8Z-fErQJ3yGf7OQlcUexbN_cDQugzQ,3119 +asgiref/timeout.py,sha256=5Ekbmn3X1HPR55qgx-hPJMPEu_-YoivHqNhFEitiSYE,3440 +asgiref/typing.py,sha256=MZ7vbJY1F7EQqo9gL9pMSFRMw9b_SQrQQsnvlJQ2iP4,5603 +asgiref/wsgi.py,sha256=-L0eo_uK_dq7EPjv1meW1BRGytURaO9NPESxnJc9CtA,6575 diff --git a/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/top_level.txt new file mode 100644 index 0000000..ddf99d3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref-3.5.1.dist-info/top_level.txt @@ -0,0 +1 @@ +asgiref diff --git a/venv/lib/python3.10/site-packages/asgiref/__init__.py b/venv/lib/python3.10/site-packages/asgiref/__init__.py new file mode 100644 index 0000000..0c11bab --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref/__init__.py @@ -0,0 +1 @@ +__version__ = "3.5.1" diff --git a/venv/lib/python3.10/site-packages/asgiref/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/asgiref/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..7fd4c4e Binary files /dev/null and b/venv/lib/python3.10/site-packages/asgiref/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/asgiref/__pycache__/compatibility.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/asgiref/__pycache__/compatibility.cpython-310.pyc new file mode 100644 index 0000000..7fef8bc Binary files /dev/null and b/venv/lib/python3.10/site-packages/asgiref/__pycache__/compatibility.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/asgiref/__pycache__/current_thread_executor.cpython-310.pyc b/venv/lib/python3.10/site-packages/asgiref/__pycache__/current_thread_executor.cpython-310.pyc new file mode 100644 index 0000000..cad1143 Binary files /dev/null and b/venv/lib/python3.10/site-packages/asgiref/__pycache__/current_thread_executor.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/asgiref/__pycache__/local.cpython-310.pyc b/venv/lib/python3.10/site-packages/asgiref/__pycache__/local.cpython-310.pyc new file mode 100644 index 0000000..e094d97 Binary files /dev/null and b/venv/lib/python3.10/site-packages/asgiref/__pycache__/local.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/asgiref/__pycache__/server.cpython-310.pyc b/venv/lib/python3.10/site-packages/asgiref/__pycache__/server.cpython-310.pyc new file mode 100644 index 0000000..5dacfe1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/asgiref/__pycache__/server.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/asgiref/__pycache__/sync.cpython-310.pyc b/venv/lib/python3.10/site-packages/asgiref/__pycache__/sync.cpython-310.pyc new file mode 100644 index 0000000..119d33f Binary files /dev/null and b/venv/lib/python3.10/site-packages/asgiref/__pycache__/sync.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/asgiref/__pycache__/testing.cpython-310.pyc b/venv/lib/python3.10/site-packages/asgiref/__pycache__/testing.cpython-310.pyc new file mode 100644 index 0000000..075e781 Binary files /dev/null and b/venv/lib/python3.10/site-packages/asgiref/__pycache__/testing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/asgiref/__pycache__/timeout.cpython-310.pyc b/venv/lib/python3.10/site-packages/asgiref/__pycache__/timeout.cpython-310.pyc new file mode 100644 index 0000000..01c80be Binary files /dev/null and b/venv/lib/python3.10/site-packages/asgiref/__pycache__/timeout.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/asgiref/__pycache__/typing.cpython-310.pyc b/venv/lib/python3.10/site-packages/asgiref/__pycache__/typing.cpython-310.pyc new file mode 100644 index 0000000..0eb0cfd Binary files /dev/null and b/venv/lib/python3.10/site-packages/asgiref/__pycache__/typing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/asgiref/__pycache__/wsgi.cpython-310.pyc b/venv/lib/python3.10/site-packages/asgiref/__pycache__/wsgi.cpython-310.pyc new file mode 100644 index 0000000..be9bbbf Binary files /dev/null and b/venv/lib/python3.10/site-packages/asgiref/__pycache__/wsgi.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/asgiref/compatibility.py b/venv/lib/python3.10/site-packages/asgiref/compatibility.py new file mode 100644 index 0000000..eccaee0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref/compatibility.py @@ -0,0 +1,47 @@ +import asyncio +import inspect + + +def is_double_callable(application): + """ + Tests to see if an application is a legacy-style (double-callable) application. 
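+
+    (An ASGI 2 "double-callable" application is a class or factory that takes
+    ``scope`` and returns a coroutine callable taking ``(receive, send)``;
+    an ASGI 3 application is a single ``async def app(scope, receive, send)``.)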
+ """ + # Look for a hint on the object first + if getattr(application, "_asgi_single_callable", False): + return False + if getattr(application, "_asgi_double_callable", False): + return True + # Uninstanted classes are double-callable + if inspect.isclass(application): + return True + # Instanted classes depend on their __call__ + if hasattr(application, "__call__"): + # We only check to see if its __call__ is a coroutine function - + # if it's not, it still might be a coroutine function itself. + if asyncio.iscoroutinefunction(application.__call__): + return False + # Non-classes we just check directly + return not asyncio.iscoroutinefunction(application) + + +def double_to_single_callable(application): + """ + Transforms a double-callable ASGI application into a single-callable one. + """ + + async def new_application(scope, receive, send): + instance = application(scope) + return await instance(receive, send) + + return new_application + + +def guarantee_single_callable(application): + """ + Takes either a single- or double-callable application and always returns it + in single-callable style. Use this to add backwards compatibility for ASGI + 2.0 applications to your server/test harness/etc. + """ + if is_double_callable(application): + application = double_to_single_callable(application) + return application diff --git a/venv/lib/python3.10/site-packages/asgiref/current_thread_executor.py b/venv/lib/python3.10/site-packages/asgiref/current_thread_executor.py new file mode 100644 index 0000000..a7898f8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref/current_thread_executor.py @@ -0,0 +1,81 @@ +import queue +import threading +from concurrent.futures import Executor, Future + + +class _WorkItem: + """ + Represents an item needing to be run in the executor. + Copied from ThreadPoolExecutor (but it's private, so we're not going to rely on importing it) + """ + + def __init__(self, future, fn, args, kwargs): + self.future = future + self.fn = fn + self.args = args + self.kwargs = kwargs + + def run(self): + if not self.future.set_running_or_notify_cancel(): + return + try: + result = self.fn(*self.args, **self.kwargs) + except BaseException as exc: + self.future.set_exception(exc) + # Break a reference cycle with the exception 'exc' + self = None + else: + self.future.set_result(result) + + +class CurrentThreadExecutor(Executor): + """ + An Executor that actually runs code in the thread it is instantiated in. + Passed to other threads running async code, so they can run sync code in + the thread they came from. + """ + + def __init__(self): + self._work_thread = threading.current_thread() + self._work_queue = queue.Queue() + self._broken = False + + def run_until_future(self, future): + """ + Runs the code in the work queue until a result is available from the future. + Should be run from the thread the executor is initialised in. + """ + # Check we're in the right thread + if threading.current_thread() != self._work_thread: + raise RuntimeError( + "You cannot run CurrentThreadExecutor from a different thread" + ) + future.add_done_callback(self._work_queue.put) + # Keep getting and running work items until we get the future we're waiting for + # back via the future's done callback. 
+ try: + while True: + # Get a work item and run it + work_item = self._work_queue.get() + if work_item is future: + return + work_item.run() + del work_item + finally: + self._broken = True + + def submit(self, fn, *args, **kwargs): + # Check they're not submitting from the same thread + if threading.current_thread() == self._work_thread: + raise RuntimeError( + "You cannot submit onto CurrentThreadExecutor from its own thread" + ) + # Check they're not too late or the executor errored + if self._broken: + raise RuntimeError("CurrentThreadExecutor already quit or is broken") + # Add to work queue + f = Future() + work_item = _WorkItem(f, fn, args, kwargs) + self._work_queue.put(work_item) + # Return the future + return f diff --git a/venv/lib/python3.10/site-packages/asgiref/local.py b/venv/lib/python3.10/site-packages/asgiref/local.py new file mode 100644 index 0000000..17314d4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref/local.py @@ -0,0 +1,120 @@ +import random +import string +import sys +import threading +import weakref + + +class Local: + """ + A drop-in replacement for threading.locals that also works with asyncio + Tasks (via the current_task asyncio method), and passes locals through + sync_to_async and async_to_sync. + + Specifically: + - Locals work per-coroutine on any thread not spawned using asgiref + - Locals work per-thread on any thread not spawned using asgiref + - Locals are shared with the parent coroutine when using sync_to_async + - Locals are shared with the parent thread when using async_to_sync + (and if that thread was launched using sync_to_async, with its parent + coroutine as well, with this working for indefinite levels of nesting) + + Set thread_critical to True to not allow locals to pass from an async Task + to a thread it spawns. This is needed for code that truly needs + thread-safety, as opposed to things used for helpful context (e.g. sqlite + does not like being called from a different thread to the one it is from). + Thread-critical code will still be differentiated per-Task within a thread + as it is expected it does not like concurrent access. + + This doesn't use contextvars as it needs to support 3.6. Once it can support + 3.7 only, we can then reimplement the storage more nicely. + """ + + def __init__(self, thread_critical: bool = False) -> None: + self._thread_critical = thread_critical + self._thread_lock = threading.RLock() + self._context_refs: "weakref.WeakSet[object]" = weakref.WeakSet() + # Random suffixes stop accidental reuse between different Locals, + # though we try to force deletion as well. + self._attr_name = "_asgiref_local_impl_{}_{}".format( + id(self), + "".join(random.choice(string.ascii_letters) for i in range(8)), + ) + + def _get_context_id(self): + """ + Get the ID we should use for looking up variables + """ + # Prevent a circular reference + from .sync import AsyncToSync, SyncToAsync + + # First, pull the current task if we can + context_id = SyncToAsync.get_current_task() + context_is_async = True + # OK, let's try for a thread ID + if context_id is None: + context_id = threading.current_thread() + context_is_async = False + # If we're thread-critical, we stop here, as we can't share contexts. 
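+        # (A thread_critical Local is keyed purely by the raw task or thread
+        # and is never resolved through the launch maps below.)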
+ if self._thread_critical: + return context_id + # Now, take those and see if we can resolve them through the launch maps + for i in range(sys.getrecursionlimit()): + try: + if context_is_async: + # Tasks have a source thread in AsyncToSync + context_id = AsyncToSync.launch_map[context_id] + context_is_async = False + else: + # Threads have a source task in SyncToAsync + context_id = SyncToAsync.launch_map[context_id] + context_is_async = True + except KeyError: + break + else: + # Catch infinite loops (they happen if you are screwing around + # with AsyncToSync implementations) + raise RuntimeError("Infinite launch_map loops") + return context_id + + def _get_storage(self): + context_obj = self._get_context_id() + if not hasattr(context_obj, self._attr_name): + setattr(context_obj, self._attr_name, {}) + self._context_refs.add(context_obj) + return getattr(context_obj, self._attr_name) + + def __del__(self): + try: + for context_obj in self._context_refs: + try: + delattr(context_obj, self._attr_name) + except AttributeError: + pass + except TypeError: + # WeakSet.__iter__ can crash when interpreter is shutting down due + # to _IterationGuard being None. + pass + + def __getattr__(self, key): + with self._thread_lock: + storage = self._get_storage() + if key in storage: + return storage[key] + else: + raise AttributeError(f"{self!r} object has no attribute {key!r}") + + def __setattr__(self, key, value): + if key in ("_context_refs", "_thread_critical", "_thread_lock", "_attr_name"): + return super().__setattr__(key, value) + with self._thread_lock: + storage = self._get_storage() + storage[key] = value + + def __delattr__(self, key): + with self._thread_lock: + storage = self._get_storage() + if key in storage: + del storage[key] + else: + raise AttributeError(f"{self!r} object has no attribute {key!r}") diff --git a/venv/lib/python3.10/site-packages/asgiref/py.typed b/venv/lib/python3.10/site-packages/asgiref/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/asgiref/server.py b/venv/lib/python3.10/site-packages/asgiref/server.py new file mode 100644 index 0000000..43c28c6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref/server.py @@ -0,0 +1,157 @@ +import asyncio +import logging +import time +import traceback + +from .compatibility import guarantee_single_callable + +logger = logging.getLogger(__name__) + + +class StatelessServer: + """ + Base server class that handles basic concepts like application instance + creation/pooling, exception handling, and similar, for stateless protocols + (i.e. ones without actual incoming connections to the process) + + Your code should override the handle() method, doing whatever it needs to, + and calling get_or_create_application_instance with a unique `scope_id` + and `scope` for the scope it wants to get. + + If an application instance is found with the same `scope_id`, you are + given its input queue, otherwise one is made for you with the scope provided + and you are given that fresh new input queue. Either way, you should do + something like: + + input_queue = self.get_or_create_application_instance( + "user-123456", + {"type": "testprotocol", "user_id": "123456", "username": "andrew"}, + ) + input_queue.put_nowait(message) + + If you try and create an application instance and there are already + `max_application` instances, the oldest/least recently used one will be + reclaimed and shut down to make space. 
+ + Application coroutines that error will be found periodically (every 100ms + by default) and have their exceptions printed to the console. Override + application_exception() if you want to do more when this happens. + + If you override run(), make sure you handle things like launching the + application checker. + """ + + application_checker_interval = 0.1 + + def __init__(self, application, max_applications=1000): + # Parameters + self.application = application + self.max_applications = max_applications + # Initialisation + self.application_instances = {} + + ### Mainloop and handling + + def run(self): + """ + Runs the asyncio event loop with our handler loop. + """ + event_loop = asyncio.get_event_loop() + asyncio.ensure_future(self.application_checker()) + try: + event_loop.run_until_complete(self.handle()) + except KeyboardInterrupt: + logger.info("Exiting due to Ctrl-C/interrupt") + + async def handle(self): + raise NotImplementedError("You must implement handle()") + + async def application_send(self, scope, message): + """ + Receives outbound sends from applications and handles them. + """ + raise NotImplementedError("You must implement application_send()") + + ### Application instance management + + def get_or_create_application_instance(self, scope_id, scope): + """ + Creates an application instance and returns its queue. + """ + if scope_id in self.application_instances: + self.application_instances[scope_id]["last_used"] = time.time() + return self.application_instances[scope_id]["input_queue"] + # See if we need to delete an old one + while len(self.application_instances) > self.max_applications: + self.delete_oldest_application_instance() + # Make an instance of the application + input_queue = asyncio.Queue() + application_instance = guarantee_single_callable(self.application) + # Run it, and stash the future for later checking + future = asyncio.ensure_future( + application_instance( + scope=scope, + receive=input_queue.get, + send=lambda message: self.application_send(scope, message), + ), + ) + self.application_instances[scope_id] = { + "input_queue": input_queue, + "future": future, + "scope": scope, + "last_used": time.time(), + } + return input_queue + + def delete_oldest_application_instance(self): + """ + Finds and deletes the oldest application instance + """ + oldest_time = min( + details["last_used"] for details in self.application_instances.values() + ) + for scope_id, details in self.application_instances.items(): + if details["last_used"] == oldest_time: + self.delete_application_instance(scope_id) + # Return to make sure we only delete one in case two have + # the same oldest time + return + + def delete_application_instance(self, scope_id): + """ + Removes an application instance (makes sure its task is stopped, + then removes it from the current set) + """ + details = self.application_instances[scope_id] + del self.application_instances[scope_id] + if not details["future"].done(): + details["future"].cancel() + + async def application_checker(self): + """ + Goes through the set of current application instance Futures and cleans up + any that are done/prints exceptions for any that errored. 
+ """ + while True: + await asyncio.sleep(self.application_checker_interval) + for scope_id, details in list(self.application_instances.items()): + if details["future"].done(): + exception = details["future"].exception() + if exception: + await self.application_exception(exception, details) + try: + del self.application_instances[scope_id] + except KeyError: + # Exception handling might have already got here before us. That's fine. + pass + + async def application_exception(self, exception, application_details): + """ + Called whenever an application coroutine has an exception. + """ + logging.error( + "Exception inside application: %s\n%s%s", + exception, + "".join(traceback.format_tb(exception.__traceback__)), + f" {exception}", + ) diff --git a/venv/lib/python3.10/site-packages/asgiref/sync.py b/venv/lib/python3.10/site-packages/asgiref/sync.py new file mode 100644 index 0000000..a70dac1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref/sync.py @@ -0,0 +1,523 @@ +import asyncio +import asyncio.coroutines +import contextvars +import functools +import inspect +import os +import sys +import threading +import warnings +import weakref +from concurrent.futures import Future, ThreadPoolExecutor +from typing import Any, Callable, Dict, Optional, overload + +from .current_thread_executor import CurrentThreadExecutor +from .local import Local + + +def _restore_context(context): + # Check for changes in contextvars, and set them to the current + # context for downstream consumers + for cvar in context: + try: + if cvar.get() != context.get(cvar): + cvar.set(context.get(cvar)) + except LookupError: + cvar.set(context.get(cvar)) + + +def _iscoroutinefunction_or_partial(func: Any) -> bool: + # Python < 3.8 does not correctly determine partially wrapped + # coroutine functions are coroutine functions, hence the need for + # this to exist. Code taken from CPython. + if sys.version_info >= (3, 8): + return asyncio.iscoroutinefunction(func) + else: + while inspect.ismethod(func): + func = func.__func__ + while isinstance(func, functools.partial): + func = func.func + + return asyncio.iscoroutinefunction(func) + + +class ThreadSensitiveContext: + """Async context manager to manage context for thread sensitive mode + + This context manager controls which thread pool executor is used when in + thread sensitive mode. By default, a single thread pool executor is shared + within a process. + + In Python 3.7+, the ThreadSensitiveContext() context manager may be used to + specify a thread pool per context. + + This context manager is re-entrant, so only the outer-most call to + ThreadSensitiveContext will set the context. + + Usage: + + >>> import time + >>> async with ThreadSensitiveContext(): + ... await sync_to_async(time.sleep, 1)() + """ + + def __init__(self): + self.token = None + + async def __aenter__(self): + try: + SyncToAsync.thread_sensitive_context.get() + except LookupError: + self.token = SyncToAsync.thread_sensitive_context.set(self) + + return self + + async def __aexit__(self, exc, value, tb): + if not self.token: + return + + executor = SyncToAsync.context_to_thread_executor.pop(self, None) + if executor: + executor.shutdown() + SyncToAsync.thread_sensitive_context.reset(self.token) + + +class AsyncToSync: + """ + Utility class which turns an awaitable that only works on the thread with + the event loop into a synchronous callable that works in a subthread. + + If the call stack contains an async loop, the code runs there. + Otherwise, the code runs in a new loop in a new thread. 
+ + Either way, this thread then pauses and waits to run any thread_sensitive + code called from further down the call stack using SyncToAsync, before + finally exiting once the async task returns. + """ + + # Maps launched Tasks to the threads that launched them (for locals impl) + launch_map: "Dict[asyncio.Task[object], threading.Thread]" = {} + + # Keeps track of which CurrentThreadExecutor to use. This uses an asgiref + # Local, not a threadlocal, so that tasks can work out what their parent used. + executors = Local() + + # When we can't find a CurrentThreadExecutor from the context, such as + # inside create_task, we'll look it up here from the running event loop. + loop_thread_executors: "Dict[asyncio.AbstractEventLoop, CurrentThreadExecutor]" = {} + + def __init__(self, awaitable, force_new_loop=False): + if not callable(awaitable) or not _iscoroutinefunction_or_partial(awaitable): + # Python does not have very reliable detection of async functions + # (lots of false negatives) so this is just a warning. + warnings.warn( + "async_to_sync was passed a non-async-marked callable", stacklevel=2 + ) + self.awaitable = awaitable + try: + self.__self__ = self.awaitable.__self__ + except AttributeError: + pass + if force_new_loop: + # They have asked that we always run in a new sub-loop. + self.main_event_loop = None + else: + try: + self.main_event_loop = asyncio.get_running_loop() + except RuntimeError: + # There's no event loop in this thread. Look for the threadlocal if + # we're inside SyncToAsync + main_event_loop_pid = getattr( + SyncToAsync.threadlocal, "main_event_loop_pid", None + ) + # We make sure the parent loop is from the same process - if + # they've forked, this is not going to be valid any more (#194) + if main_event_loop_pid and main_event_loop_pid == os.getpid(): + self.main_event_loop = getattr( + SyncToAsync.threadlocal, "main_event_loop", None + ) + else: + self.main_event_loop = None + + def __call__(self, *args, **kwargs): + # You can't call AsyncToSync from a thread with a running event loop + try: + event_loop = asyncio.get_running_loop() + except RuntimeError: + pass + else: + if event_loop.is_running(): + raise RuntimeError( + "You cannot use AsyncToSync in the same thread as an async event loop - " + "just await the async function directly." + ) + + # Wrapping context in list so it can be reassigned from within + # `main_wrap`. + context = [contextvars.copy_context()] + + # Make a future for the return information + call_result = Future() + # Get the source thread + source_thread = threading.current_thread() + # Make a CurrentThreadExecutor we'll use to idle in this thread - we + # need one for every sync frame, even if there's one above us in the + # same thread. + if hasattr(self.executors, "current"): + old_current_executor = self.executors.current + else: + old_current_executor = None + current_executor = CurrentThreadExecutor() + self.executors.current = current_executor + loop = None + # Use call_soon_threadsafe to schedule a synchronous callback on the + # main event loop's thread if it's there, otherwise make a new loop + # in this thread. + try: + awaitable = self.main_wrap( + args, kwargs, call_result, source_thread, sys.exc_info(), context + ) + + if not (self.main_event_loop and self.main_event_loop.is_running()): + # Make our own event loop - in a new thread - and run inside that. 
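+                # (Registering the new loop in loop_thread_executors just below
+                # lets thread_sensitive sync_to_async calls running on that loop
+                # find this thread's CurrentThreadExecutor and route their work
+                # back here.)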
+ loop = asyncio.new_event_loop() + self.loop_thread_executors[loop] = current_executor + loop_executor = ThreadPoolExecutor(max_workers=1) + loop_future = loop_executor.submit( + self._run_event_loop, loop, awaitable + ) + if current_executor: + # Run the CurrentThreadExecutor until the future is done + current_executor.run_until_future(loop_future) + # Wait for future and/or allow for exception propagation + loop_future.result() + else: + # Call it inside the existing loop + self.main_event_loop.call_soon_threadsafe( + self.main_event_loop.create_task, awaitable + ) + if current_executor: + # Run the CurrentThreadExecutor until the future is done + current_executor.run_until_future(call_result) + finally: + # Clean up any executor we were running + if loop is not None: + del self.loop_thread_executors[loop] + if hasattr(self.executors, "current"): + del self.executors.current + if old_current_executor: + self.executors.current = old_current_executor + _restore_context(context[0]) + + # Wait for results from the future. + return call_result.result() + + def _run_event_loop(self, loop, coro): + """ + Runs the given event loop (designed to be called in a thread). + """ + asyncio.set_event_loop(loop) + try: + loop.run_until_complete(coro) + finally: + try: + # mimic asyncio.run() behavior + # cancel unexhausted async generators + tasks = asyncio.all_tasks(loop) + for task in tasks: + task.cancel() + + async def gather(): + await asyncio.gather(*tasks, return_exceptions=True) + + loop.run_until_complete(gather()) + for task in tasks: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during loop shutdown", + "exception": task.exception(), + "task": task, + } + ) + if hasattr(loop, "shutdown_asyncgens"): + loop.run_until_complete(loop.shutdown_asyncgens()) + finally: + loop.close() + asyncio.set_event_loop(self.main_event_loop) + + def __get__(self, parent, objtype): + """ + Include self for methods + """ + func = functools.partial(self.__call__, parent) + return functools.update_wrapper(func, self.awaitable) + + async def main_wrap( + self, args, kwargs, call_result, source_thread, exc_info, context + ): + """ + Wraps the awaitable with something that puts the result into the + result/exception future. + """ + if context is not None: + _restore_context(context[0]) + + current_task = SyncToAsync.get_current_task() + self.launch_map[current_task] = source_thread + try: + # If we have an exception, run the function inside the except block + # after raising it so exc_info is correctly populated. + if exc_info[1]: + try: + raise exc_info[1] + except BaseException: + result = await self.awaitable(*args, **kwargs) + else: + result = await self.awaitable(*args, **kwargs) + except BaseException as e: + call_result.set_exception(e) + else: + call_result.set_result(result) + finally: + del self.launch_map[current_task] + + context[0] = contextvars.copy_context() + + +class SyncToAsync: + """ + Utility class which turns a synchronous callable into an awaitable that + runs in a threadpool. It also sets a threadlocal inside the thread so + calls to AsyncToSync can escape it. + + If thread_sensitive is passed, the code will run in the same thread as any + outer code. This is needed for underlying Python code that is not + threadsafe (for example, code which handles SQLite database connections). + + If the outermost program is async (i.e. 
SyncToAsync is outermost), then + this will be a dedicated single sub-thread that all sync code runs in, + one after the other. If the outermost program is sync (i.e. AsyncToSync is + outermost), this will just be the main thread. This is achieved by idling + with a CurrentThreadExecutor while AsyncToSync is blocking its sync parent, + rather than just blocking. + + If executor is passed in, that will be used instead of the loop's default executor. + In order to pass in an executor, thread_sensitive must be set to False, otherwise + a TypeError will be raised. + """ + + # If they've set ASGI_THREADS, update the default asyncio executor for now + if "ASGI_THREADS" in os.environ: + # We use get_event_loop here - not get_running_loop - as this will + # be run at import time, and we want to update the main thread's loop. + loop = asyncio.get_event_loop() + loop.set_default_executor( + ThreadPoolExecutor(max_workers=int(os.environ["ASGI_THREADS"])) + ) + + # Maps launched threads to the coroutines that spawned them + launch_map: "Dict[threading.Thread, asyncio.Task[object]]" = {} + + # Storage for main event loop references + threadlocal = threading.local() + + # Single-thread executor for thread-sensitive code + single_thread_executor = ThreadPoolExecutor(max_workers=1) + + # Maintain a contextvar for the current execution context. Optionally used + # for thread sensitive mode. + thread_sensitive_context: "contextvars.ContextVar[str]" = contextvars.ContextVar( + "thread_sensitive_context" + ) + + # Contextvar that is used to detect if the single thread executor + # would be awaited on while already being used in the same context + deadlock_context: "contextvars.ContextVar[bool]" = contextvars.ContextVar( + "deadlock_context" + ) + + # Maintaining a weak reference to the context ensures that thread pools are + # erased once the context goes out of scope. This terminates the thread pool. 
+ context_to_thread_executor: "weakref.WeakKeyDictionary[object, ThreadPoolExecutor]" = ( + weakref.WeakKeyDictionary() + ) + + def __init__( + self, + func: Callable[..., Any], + thread_sensitive: bool = True, + executor: Optional["ThreadPoolExecutor"] = None, + ) -> None: + if not callable(func) or _iscoroutinefunction_or_partial(func): + raise TypeError("sync_to_async can only be applied to sync functions.") + self.func = func + functools.update_wrapper(self, func) + self._thread_sensitive = thread_sensitive + self._is_coroutine = asyncio.coroutines._is_coroutine # type: ignore + if thread_sensitive and executor is not None: + raise TypeError("executor must not be set when thread_sensitive is True") + self._executor = executor + try: + self.__self__ = func.__self__ # type: ignore + except AttributeError: + pass + + async def __call__(self, *args, **kwargs): + loop = asyncio.get_running_loop() + + # Work out what thread to run the code in + if self._thread_sensitive: + if hasattr(AsyncToSync.executors, "current"): + # If we have a parent sync thread above somewhere, use that + executor = AsyncToSync.executors.current + elif self.thread_sensitive_context and self.thread_sensitive_context.get( + None + ): + # If we have a way of retrieving the current context, attempt + # to use a per-context thread pool executor + thread_sensitive_context = self.thread_sensitive_context.get() + + if thread_sensitive_context in self.context_to_thread_executor: + # Re-use thread executor in current context + executor = self.context_to_thread_executor[thread_sensitive_context] + else: + # Create new thread executor in current context + executor = ThreadPoolExecutor(max_workers=1) + self.context_to_thread_executor[thread_sensitive_context] = executor + elif loop in AsyncToSync.loop_thread_executors: + # Re-use thread executor for running loop + executor = AsyncToSync.loop_thread_executors[loop] + elif self.deadlock_context and self.deadlock_context.get(False): + raise RuntimeError( + "Single thread executor already being used, would deadlock" + ) + else: + # Otherwise, we run it in a fixed single thread + executor = self.single_thread_executor + if self.deadlock_context: + self.deadlock_context.set(True) + else: + # Use the passed in executor, or the loop's default if it is None + executor = self._executor + + context = contextvars.copy_context() + child = functools.partial(self.func, *args, **kwargs) + func = context.run + args = (child,) + kwargs = {} + + try: + # Run the code in the right thread + future = loop.run_in_executor( + executor, + functools.partial( + self.thread_handler, + loop, + self.get_current_task(), + sys.exc_info(), + func, + *args, + **kwargs, + ), + ) + ret = await asyncio.wait_for(future, timeout=None) + + finally: + _restore_context(context) + if self.deadlock_context: + self.deadlock_context.set(False) + + return ret + + def __get__(self, parent, objtype): + """ + Include self for methods + """ + return functools.partial(self.__call__, parent) + + def thread_handler(self, loop, source_task, exc_info, func, *args, **kwargs): + """ + Wraps the sync application with exception handling. 
+ """ + # Set the threadlocal for AsyncToSync + self.threadlocal.main_event_loop = loop + self.threadlocal.main_event_loop_pid = os.getpid() + # Set the task mapping (used for the locals module) + current_thread = threading.current_thread() + if AsyncToSync.launch_map.get(source_task) == current_thread: + # Our parent task was launched from this same thread, so don't make + # a launch map entry - let it shortcut over us! (and stop infinite loops) + parent_set = False + else: + self.launch_map[current_thread] = source_task + parent_set = True + # Run the function + try: + # If we have an exception, run the function inside the except block + # after raising it so exc_info is correctly populated. + if exc_info[1]: + try: + raise exc_info[1] + except BaseException: + return func(*args, **kwargs) + else: + return func(*args, **kwargs) + finally: + # Only delete the launch_map parent if we set it, otherwise it is + # from someone else. + if parent_set: + del self.launch_map[current_thread] + + @staticmethod + def get_current_task(): + """ + Implementation of asyncio.current_task() + that returns None if there is no task. + """ + try: + return asyncio.current_task() + except RuntimeError: + return None + + +# Lowercase aliases (and decorator friendliness) +async_to_sync = AsyncToSync + + +@overload +def sync_to_async( + func: None = None, + thread_sensitive: bool = True, + executor: Optional["ThreadPoolExecutor"] = None, +) -> Callable[[Callable[..., Any]], SyncToAsync]: + ... + + +@overload +def sync_to_async( + func: Callable[..., Any], + thread_sensitive: bool = True, + executor: Optional["ThreadPoolExecutor"] = None, +) -> SyncToAsync: + ... + + +def sync_to_async( + func=None, + thread_sensitive=True, + executor=None, +): + if func is None: + return lambda f: SyncToAsync( + f, + thread_sensitive=thread_sensitive, + executor=executor, + ) + return SyncToAsync( + func, + thread_sensitive=thread_sensitive, + executor=executor, + ) diff --git a/venv/lib/python3.10/site-packages/asgiref/testing.py b/venv/lib/python3.10/site-packages/asgiref/testing.py new file mode 100644 index 0000000..6624317 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref/testing.py @@ -0,0 +1,97 @@ +import asyncio +import time + +from .compatibility import guarantee_single_callable +from .timeout import timeout as async_timeout + + +class ApplicationCommunicator: + """ + Runs an ASGI application in a test mode, allowing sending of + messages to it and retrieval of messages it sends. + """ + + def __init__(self, application, scope): + self.application = guarantee_single_callable(application) + self.scope = scope + self.input_queue = asyncio.Queue() + self.output_queue = asyncio.Queue() + self.future = asyncio.ensure_future( + self.application(scope, self.input_queue.get, self.output_queue.put) + ) + + async def wait(self, timeout=1): + """ + Waits for the application to stop itself and returns any exceptions. 
+ """ + try: + async with async_timeout(timeout): + try: + await self.future + self.future.result() + except asyncio.CancelledError: + pass + finally: + if not self.future.done(): + self.future.cancel() + try: + await self.future + except asyncio.CancelledError: + pass + + def stop(self, exceptions=True): + if not self.future.done(): + self.future.cancel() + elif exceptions: + # Give a chance to raise any exceptions + self.future.result() + + def __del__(self): + # Clean up on deletion + try: + self.stop(exceptions=False) + except RuntimeError: + # Event loop already stopped + pass + + async def send_input(self, message): + """ + Sends a single message to the application + """ + # Give it the message + await self.input_queue.put(message) + + async def receive_output(self, timeout=1): + """ + Receives a single message from the application, with optional timeout. + """ + # Make sure there's not an exception to raise from the task + if self.future.done(): + self.future.result() + # Wait and receive the message + try: + async with async_timeout(timeout): + return await self.output_queue.get() + except asyncio.TimeoutError as e: + # See if we have another error to raise inside + if self.future.done(): + self.future.result() + else: + self.future.cancel() + try: + await self.future + except asyncio.CancelledError: + pass + raise e + + async def receive_nothing(self, timeout=0.1, interval=0.01): + """ + Checks that there is no message to receive in the given time. + """ + # `interval` has precedence over `timeout` + start = time.monotonic() + while time.monotonic() - start < timeout: + if not self.output_queue.empty(): + return False + await asyncio.sleep(interval) + return self.output_queue.empty() diff --git a/venv/lib/python3.10/site-packages/asgiref/timeout.py b/venv/lib/python3.10/site-packages/asgiref/timeout.py new file mode 100644 index 0000000..65932d1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref/timeout.py @@ -0,0 +1,112 @@ +# This code is originally sourced from the aio-libs project "async_timeout", +# under the Apache 2.0 license. You may see the original project at +# https://github.com/aio-libs/async-timeout + +# It is vendored here to reduce chain-dependencies on this library, and +# modified slightly to remove some features we don't use. + + +import asyncio +from types import TracebackType +from typing import Any, Optional, Type + + +class timeout: + """timeout context manager. + + Useful in cases when you want to apply timeout logic around block + of code or in cases when asyncio.wait_for is not suitable. For example: + + >>> with timeout(0.001): + ... async with aiohttp.get('https://github.com') as r: + ... 
await r.text() + + + timeout - value in seconds or None to disable timeout logic + loop - asyncio compatible event loop + """ + + def __init__( + self, + timeout: Optional[float], + *, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + self._timeout = timeout + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + self._task = None # type: Optional[asyncio.Task[Any]] + self._cancelled = False + self._cancel_handler = None # type: Optional[asyncio.Handle] + self._cancel_at = None # type: Optional[float] + + def __enter__(self) -> "timeout": + return self._do_enter() + + def __exit__( + self, + exc_type: Type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, + ) -> Optional[bool]: + self._do_exit(exc_type) + return None + + async def __aenter__(self) -> "timeout": + return self._do_enter() + + async def __aexit__( + self, + exc_type: Type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, + ) -> None: + self._do_exit(exc_type) + + @property + def expired(self) -> bool: + return self._cancelled + + @property + def remaining(self) -> Optional[float]: + if self._cancel_at is not None: + return max(self._cancel_at - self._loop.time(), 0.0) + else: + return None + + def _do_enter(self) -> "timeout": + # Support Tornado 5- without timeout + # Details: https://github.com/python/asyncio/issues/392 + if self._timeout is None: + return self + + self._task = asyncio.current_task(self._loop) + if self._task is None: + raise RuntimeError( + "Timeout context manager should be used " "inside a task" + ) + + if self._timeout <= 0: + self._loop.call_soon(self._cancel_task) + return self + + self._cancel_at = self._loop.time() + self._timeout + self._cancel_handler = self._loop.call_at(self._cancel_at, self._cancel_task) + return self + + def _do_exit(self, exc_type: Type[BaseException]) -> None: + if exc_type is asyncio.CancelledError and self._cancelled: + self._cancel_handler = None + self._task = None + raise asyncio.TimeoutError + if self._timeout is not None and self._cancel_handler is not None: + self._cancel_handler.cancel() + self._cancel_handler = None + self._task = None + return None + + def _cancel_task(self) -> None: + if self._task is not None: + self._task.cancel() + self._cancelled = True diff --git a/venv/lib/python3.10/site-packages/asgiref/typing.py b/venv/lib/python3.10/site-packages/asgiref/typing.py new file mode 100644 index 0000000..c7d7576 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref/typing.py @@ -0,0 +1,242 @@ +import sys +from typing import Awaitable, Callable, Dict, Iterable, Optional, Tuple, Type, Union + +if sys.version_info >= (3, 8): + from typing import Literal, Protocol, TypedDict +else: + from typing_extensions import Literal, Protocol, TypedDict + +__all__ = ( + "ASGIVersions", + "HTTPScope", + "WebSocketScope", + "LifespanScope", + "WWWScope", + "Scope", + "HTTPRequestEvent", + "HTTPResponseStartEvent", + "HTTPResponseBodyEvent", + "HTTPServerPushEvent", + "HTTPDisconnectEvent", + "WebSocketConnectEvent", + "WebSocketAcceptEvent", + "WebSocketReceiveEvent", + "WebSocketSendEvent", + "WebSocketResponseStartEvent", + "WebSocketResponseBodyEvent", + "WebSocketDisconnectEvent", + "WebSocketCloseEvent", + "LifespanStartupEvent", + "LifespanShutdownEvent", + "LifespanStartupCompleteEvent", + "LifespanStartupFailedEvent", + "LifespanShutdownCompleteEvent", + "LifespanShutdownFailedEvent", + "ASGIReceiveEvent", + "ASGISendEvent", + "ASGIReceiveCallable", + "ASGISendCallable", + "ASGI2Protocol", + 
"ASGI2Application", + "ASGI3Application", + "ASGIApplication", +) + + +class ASGIVersions(TypedDict): + spec_version: str + version: Union[Literal["2.0"], Literal["3.0"]] + + +class HTTPScope(TypedDict): + type: Literal["http"] + asgi: ASGIVersions + http_version: str + method: str + scheme: str + path: str + raw_path: bytes + query_string: bytes + root_path: str + headers: Iterable[Tuple[bytes, bytes]] + client: Optional[Tuple[str, int]] + server: Optional[Tuple[str, Optional[int]]] + extensions: Optional[Dict[str, Dict[object, object]]] + + +class WebSocketScope(TypedDict): + type: Literal["websocket"] + asgi: ASGIVersions + http_version: str + scheme: str + path: str + raw_path: bytes + query_string: bytes + root_path: str + headers: Iterable[Tuple[bytes, bytes]] + client: Optional[Tuple[str, int]] + server: Optional[Tuple[str, Optional[int]]] + subprotocols: Iterable[str] + extensions: Optional[Dict[str, Dict[object, object]]] + + +class LifespanScope(TypedDict): + type: Literal["lifespan"] + asgi: ASGIVersions + + +WWWScope = Union[HTTPScope, WebSocketScope] +Scope = Union[HTTPScope, WebSocketScope, LifespanScope] + + +class HTTPRequestEvent(TypedDict): + type: Literal["http.request"] + body: bytes + more_body: bool + + +class HTTPResponseStartEvent(TypedDict): + type: Literal["http.response.start"] + status: int + headers: Iterable[Tuple[bytes, bytes]] + + +class HTTPResponseBodyEvent(TypedDict): + type: Literal["http.response.body"] + body: bytes + more_body: bool + + +class HTTPServerPushEvent(TypedDict): + type: Literal["http.response.push"] + path: str + headers: Iterable[Tuple[bytes, bytes]] + + +class HTTPDisconnectEvent(TypedDict): + type: Literal["http.disconnect"] + + +class WebSocketConnectEvent(TypedDict): + type: Literal["websocket.connect"] + + +class WebSocketAcceptEvent(TypedDict): + type: Literal["websocket.accept"] + subprotocol: Optional[str] + headers: Iterable[Tuple[bytes, bytes]] + + +class WebSocketReceiveEvent(TypedDict): + type: Literal["websocket.receive"] + bytes: Optional[bytes] + text: Optional[str] + + +class WebSocketSendEvent(TypedDict): + type: Literal["websocket.send"] + bytes: Optional[bytes] + text: Optional[str] + + +class WebSocketResponseStartEvent(TypedDict): + type: Literal["websocket.http.response.start"] + status: int + headers: Iterable[Tuple[bytes, bytes]] + + +class WebSocketResponseBodyEvent(TypedDict): + type: Literal["websocket.http.response.body"] + body: bytes + more_body: bool + + +class WebSocketDisconnectEvent(TypedDict): + type: Literal["websocket.disconnect"] + code: int + + +class WebSocketCloseEvent(TypedDict): + type: Literal["websocket.close"] + code: int + reason: Optional[str] + + +class LifespanStartupEvent(TypedDict): + type: Literal["lifespan.startup"] + + +class LifespanShutdownEvent(TypedDict): + type: Literal["lifespan.shutdown"] + + +class LifespanStartupCompleteEvent(TypedDict): + type: Literal["lifespan.startup.complete"] + + +class LifespanStartupFailedEvent(TypedDict): + type: Literal["lifespan.startup.failed"] + message: str + + +class LifespanShutdownCompleteEvent(TypedDict): + type: Literal["lifespan.shutdown.complete"] + + +class LifespanShutdownFailedEvent(TypedDict): + type: Literal["lifespan.shutdown.failed"] + message: str + + +ASGIReceiveEvent = Union[ + HTTPRequestEvent, + HTTPDisconnectEvent, + WebSocketConnectEvent, + WebSocketReceiveEvent, + WebSocketDisconnectEvent, + LifespanStartupEvent, + LifespanShutdownEvent, +] + + +ASGISendEvent = Union[ + HTTPResponseStartEvent, + HTTPResponseBodyEvent, + 
HTTPServerPushEvent, + HTTPDisconnectEvent, + WebSocketAcceptEvent, + WebSocketSendEvent, + WebSocketResponseStartEvent, + WebSocketResponseBodyEvent, + WebSocketCloseEvent, + LifespanStartupCompleteEvent, + LifespanStartupFailedEvent, + LifespanShutdownCompleteEvent, + LifespanShutdownFailedEvent, +] + + +ASGIReceiveCallable = Callable[[], Awaitable[ASGIReceiveEvent]] +ASGISendCallable = Callable[[ASGISendEvent], Awaitable[None]] + + +class ASGI2Protocol(Protocol): + def __init__(self, scope: Scope) -> None: + ... + + async def __call__( + self, receive: ASGIReceiveCallable, send: ASGISendCallable + ) -> None: + ... + + +ASGI2Application = Type[ASGI2Protocol] +ASGI3Application = Callable[ + [ + Scope, + ASGIReceiveCallable, + ASGISendCallable, + ], + Awaitable[None], +] +ASGIApplication = Union[ASGI2Application, ASGI3Application] diff --git a/venv/lib/python3.10/site-packages/asgiref/wsgi.py b/venv/lib/python3.10/site-packages/asgiref/wsgi.py new file mode 100644 index 0000000..40fba20 --- /dev/null +++ b/venv/lib/python3.10/site-packages/asgiref/wsgi.py @@ -0,0 +1,162 @@ +from io import BytesIO +from tempfile import SpooledTemporaryFile + +from asgiref.sync import AsyncToSync, sync_to_async + + +class WsgiToAsgi: + """ + Wraps a WSGI application to make it into an ASGI application. + """ + + def __init__(self, wsgi_application): + self.wsgi_application = wsgi_application + + async def __call__(self, scope, receive, send): + """ + ASGI application instantiation point. + We return a new WsgiToAsgiInstance here with the WSGI app + and the scope, ready to respond when it is __call__ed. + """ + await WsgiToAsgiInstance(self.wsgi_application)(scope, receive, send) + + +class WsgiToAsgiInstance: + """ + Per-socket instance of a wrapped WSGI application + """ + + def __init__(self, wsgi_application): + self.wsgi_application = wsgi_application + self.response_started = False + self.response_content_length = None + + async def __call__(self, scope, receive, send): + if scope["type"] != "http": + raise ValueError("WSGI wrapper received a non-HTTP scope") + self.scope = scope + with SpooledTemporaryFile(max_size=65536) as body: + # Alright, wait for the http.request messages + while True: + message = await receive() + if message["type"] != "http.request": + raise ValueError("WSGI wrapper received a non-HTTP-request message") + body.write(message.get("body", b"")) + if not message.get("more_body"): + break + body.seek(0) + # Wrap send so it can be called from the subthread + self.sync_send = AsyncToSync(send) + # Call the WSGI app + await self.run_wsgi_app(body) + + def build_environ(self, scope, body): + """ + Builds a scope and request body into a WSGI environ object. 
+ """ + environ = { + "REQUEST_METHOD": scope["method"], + "SCRIPT_NAME": scope.get("root_path", "").encode("utf8").decode("latin1"), + "PATH_INFO": scope["path"].encode("utf8").decode("latin1"), + "QUERY_STRING": scope["query_string"].decode("ascii"), + "SERVER_PROTOCOL": "HTTP/%s" % scope["http_version"], + "wsgi.version": (1, 0), + "wsgi.url_scheme": scope.get("scheme", "http"), + "wsgi.input": body, + "wsgi.errors": BytesIO(), + "wsgi.multithread": True, + "wsgi.multiprocess": True, + "wsgi.run_once": False, + } + # Get server name and port - required in WSGI, not in ASGI + if "server" in scope: + environ["SERVER_NAME"] = scope["server"][0] + environ["SERVER_PORT"] = str(scope["server"][1]) + else: + environ["SERVER_NAME"] = "localhost" + environ["SERVER_PORT"] = "80" + + if "client" in scope: + environ["REMOTE_ADDR"] = scope["client"][0] + + # Go through headers and make them into environ entries + for name, value in self.scope.get("headers", []): + name = name.decode("latin1") + if name == "content-length": + corrected_name = "CONTENT_LENGTH" + elif name == "content-type": + corrected_name = "CONTENT_TYPE" + else: + corrected_name = "HTTP_%s" % name.upper().replace("-", "_") + # HTTPbis say only ASCII chars are allowed in headers, but we latin1 just in case + value = value.decode("latin1") + if corrected_name in environ: + value = environ[corrected_name] + "," + value + environ[corrected_name] = value + return environ + + def start_response(self, status, response_headers, exc_info=None): + """ + WSGI start_response callable. + """ + # Don't allow re-calling once response has begun + if self.response_started: + raise exc_info[1].with_traceback(exc_info[2]) + # Don't allow re-calling without exc_info + if hasattr(self, "response_start") and exc_info is None: + raise ValueError( + "You cannot call start_response a second time without exc_info" + ) + # Extract status code + status_code, _ = status.split(" ", 1) + status_code = int(status_code) + # Extract headers + headers = [ + (name.lower().encode("ascii"), value.encode("ascii")) + for name, value in response_headers + ] + # Extract content-length + self.response_content_length = None + for name, value in response_headers: + if name.lower() == "content-length": + self.response_content_length = int(value) + # Build and send response start message. + self.response_start = { + "type": "http.response.start", + "status": status_code, + "headers": headers, + } + + @sync_to_async + def run_wsgi_app(self, body): + """ + Called in a subthread to run the WSGI app. We encapsulate like + this so that the start_response callable is called in the same thread. 
+ """ + # Translate the scope and incoming request body into a WSGI environ + environ = self.build_environ(self.scope, body) + # Run the WSGI app + bytes_sent = 0 + for output in self.wsgi_application(environ, self.start_response): + # If this is the first response, include the response headers + if not self.response_started: + self.response_started = True + self.sync_send(self.response_start) + # If the application supplies a Content-Length header + if self.response_content_length is not None: + # The server should not transmit more bytes to the client than the header allows + bytes_allowed = self.response_content_length - bytes_sent + if len(output) > bytes_allowed: + output = output[:bytes_allowed] + self.sync_send( + {"type": "http.response.body", "body": output, "more_body": True} + ) + bytes_sent += len(output) + # The server should stop iterating over the response when enough data has been sent + if bytes_sent == self.response_content_length: + break + # Close connection + if not self.response_started: + self.response_started = True + self.sync_send(self.response_start) + self.sync_send({"type": "http.response.body"}) diff --git a/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/LICENSE b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/LICENSE new file mode 100644 index 0000000..033c86b --- /dev/null +++ b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2016-2020 aio-libs collaboration. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/METADATA b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/METADATA
new file mode 100644
index 0000000..e68d234
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/METADATA
@@ -0,0 +1,133 @@
+Metadata-Version: 2.1
+Name: async-timeout
+Version: 4.0.2
+Summary: Timeout context manager for asyncio programs
+Home-page: https://github.com/aio-libs/async-timeout
+Author: Andrew Svetlov
+Author-email: andrew.svetlov@gmail.com
+License: Apache 2
+Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
+Project-URL: CI: GitHub Actions, https://github.com/aio-libs/async-timeout/actions
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/async-timeout
+Project-URL: GitHub: issues, https://github.com/aio-libs/async-timeout/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/async-timeout
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Framework :: AsyncIO
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Requires-Python: >=3.6
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: typing-extensions (>=3.6.5) ; python_version < "3.8"
+
+async-timeout
+=============
+.. image:: https://travis-ci.com/aio-libs/async-timeout.svg?branch=master
+   :target: https://travis-ci.com/aio-libs/async-timeout
+.. image:: https://codecov.io/gh/aio-libs/async-timeout/branch/master/graph/badge.svg
+   :target: https://codecov.io/gh/aio-libs/async-timeout
+.. image:: https://img.shields.io/pypi/v/async-timeout.svg
+   :target: https://pypi.python.org/pypi/async-timeout
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+   :target: https://gitter.im/aio-libs/Lobby
+   :alt: Chat on Gitter
+
+asyncio-compatible timeout context manager.
+
+
+Usage example
+-------------
+
+
+The context manager is useful in cases when you want to apply timeout
+logic around a block of code, or in cases when ``asyncio.wait_for()`` is
+not suitable. It is also much faster than ``asyncio.wait_for()``,
+because ``timeout`` does not create a new task.
+
+The ``timeout(delay, *, loop=None)`` call returns a context manager
+that cancels a block on *timeout* expiring::
+
+   async with timeout(1.5):
+       await inner()
+
+1. If ``inner()`` is executed faster than in ``1.5`` seconds, nothing
+   happens.
+2. Otherwise ``inner()`` is cancelled internally by sending
+   ``asyncio.CancelledError`` into it, but ``asyncio.TimeoutError`` is
+   raised outside of the context manager scope.
+
+The *timeout* parameter can be ``None`` to skip the timeout functionality.
+
+
+Alternatively, ``timeout_at(when)`` can be used for scheduling
+at an absolute time::
+
+   loop = asyncio.get_event_loop()
+   now = loop.time()
+
+   async with timeout_at(now + 1.5):
+       await inner()
+
+
+Please note: it is not POSIX time but a time with an
+undefined starting base, e.g. the time of the system power on.
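A self-contained, runnable variant of the example above (a sketch, assuming
``async-timeout`` is installed)::

   import asyncio

   from async_timeout import timeout_at

   async def main():
       loop = asyncio.get_running_loop()
       try:
           async with timeout_at(loop.time() + 0.1):
               await asyncio.sleep(1)  # outlives the deadline
       except asyncio.TimeoutError:
           print("timed out")

   asyncio.run(main())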
+
+
+The context manager has an ``.expired`` property for checking whether the
+timeout expired inside the context manager::
+
+   async with timeout(1.5) as cm:
+       await inner()
+   print(cm.expired)
+
+The property is ``True`` if ``inner()`` execution was cancelled by
+the timeout context manager.
+
+If the ``inner()`` call explicitly raises ``TimeoutError``, ``cm.expired``
+is ``False``.
+
+The scheduled deadline time is available as the ``.deadline`` property::
+
+   async with timeout(1.5) as cm:
+       cm.deadline
+
+A timeout that has not finished yet can be rescheduled with the ``shift()``
+or ``update()`` methods::
+
+   async with timeout(1.5) as cm:
+       cm.shift(1)  # add another second of waiting
+       cm.update(loop.time() + 5)  # reschedule to now+5 seconds
+
+Rescheduling is forbidden if the timeout has expired or after exit from the
+``async with`` block.
+
+
+Installation
+------------
+
+::
+
+   $ pip install async-timeout
+
+The library is Python 3 only!
+
+
+
+Authors and License
+-------------------
+
+The module is written by Andrew Svetlov.
+
+It's *Apache 2* licensed and freely available.
+
+ diff --git a/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/RECORD b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/RECORD new file mode 100644 index 0000000..6789eb0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/RECORD @@ -0,0 +1,10 @@
+async_timeout-4.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+async_timeout-4.0.2.dist-info/LICENSE,sha256=4Y17uPUT4sRrtYXJS1hb0wcg3TzLId2weG9y0WZY-Sw,568
+async_timeout-4.0.2.dist-info/METADATA,sha256=2pfMxxBst5vQ7SfMy5TDaDU0cRgCSQa7wcD5eI-Ew-8,4193
+async_timeout-4.0.2.dist-info/RECORD,,
+async_timeout-4.0.2.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+async_timeout-4.0.2.dist-info/top_level.txt,sha256=9oM4e7Twq8iD_7_Q3Mz0E6GPIB6vJvRFo-UBwUQtBDU,14
+async_timeout-4.0.2.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+async_timeout/__init__.py,sha256=N-JUI_VExhHnO0emkF_-h08dl4HBgOje16N4Ci-W-go,7487
+async_timeout/__pycache__/__init__.cpython-310.pyc,,
+async_timeout/py.typed,sha256=tyozzRT1fziXETDxokmuyt6jhOmtjUbnVNJdZcG7ik0,12
 diff --git a/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/WHEEL b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/WHEEL @@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
 diff --git a/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/top_level.txt new file mode 100644 index 0000000..ad29955 --- /dev/null +++ b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +async_timeout diff --git a/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/zip-safe b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/venv/lib/python3.10/site-packages/async_timeout-4.0.2.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/venv/lib/python3.10/site-packages/async_timeout/__init__.py b/venv/lib/python3.10/site-packages/async_timeout/__init__.py new file mode 100644 index 0000000..179d1b0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/async_timeout/__init__.py @@ -0,0 +1,247 @@ +import asyncio
+import enum
+import sys
+import warnings
+from types import TracebackType
+from typing import Any, Optional, Type
+
+
+if sys.version_info >= (3, 8):
+    from typing import final
+else:
+    from typing_extensions import final
+
+
+__version__ = "4.0.2"
+
+
+__all__ = ("timeout", "timeout_at", "Timeout")
+
+
+def timeout(delay: Optional[float]) -> "Timeout":
+    """timeout context manager.
+
+    Useful when you want to apply timeout logic around a block
+    of code, or in cases when asyncio.wait_for is not suitable. For example:
+
+    >>> async with timeout(0.001):
+    ...     async with aiohttp.get('https://github.com') as r:
+    ...         await r.text()
+
+
+    delay - value in seconds or None to disable timeout logic
+    """
+    loop = _get_running_loop()
+    if delay is not None:
+        deadline = loop.time() + delay  # type: Optional[float]
+    else:
+        deadline = None
+    return Timeout(deadline, loop)
+
+
+def timeout_at(deadline: Optional[float]) -> "Timeout":
+    """Schedule the timeout at an absolute time.
+
+    The deadline argument points to a time in the same clock system
+    as loop.time().
+
+    Please note: this is not POSIX time but a clock with an
+    undefined starting base, e.g. the time since system power-on.
+
+    >>> async with timeout_at(loop.time() + 10):
+    ...     async with aiohttp.get('https://github.com') as r:
+    ...         await r.text()
+
+
+    """
+    loop = _get_running_loop()
+    return Timeout(deadline, loop)
+
+
+class _State(enum.Enum):
+    INIT = "INIT"
+    ENTER = "ENTER"
+    TIMEOUT = "TIMEOUT"
+    EXIT = "EXIT"
+
+
+@final
+class Timeout:
+    # Internal class, please don't instantiate it directly
+    # Use timeout() and timeout_at() public factories instead.
+    #
+    # Implementation note: `async with timeout()` is preferred
+    # over `with timeout()`.
+    # While technically the Timeout class implementation
+    # doesn't need to be async at all,
+    # the `async with` statement explicitly signals that
+    # the context manager should be used from an async function context.
+    #
+    # This design helps to avoid many silly misuses.
+    #
+    # TimeoutError is raised immediately when scheduled
+    # if the deadline is passed.
+    # The purpose is to time out as soon as possible
+    # without waiting for the next await expression.
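+    # An illustrative sketch of that behaviour (not upstream code): with a
+    # deadline that is already behind us, the first await point is cancelled
+    # right away:
+    #
+    #     async def demo():
+    #         async with timeout(0):        # deadline already passed on enter
+    #             await asyncio.sleep(1)    # cancelled here immediately
+    #
+    #     # asyncio.run(demo()) raises asyncio.TimeoutError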
+
+    __slots__ = ("_deadline", "_loop", "_state", "_timeout_handler")
+
+    def __init__(
+        self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
+    ) -> None:
+        self._loop = loop
+        self._state = _State.INIT
+
+        self._timeout_handler = None  # type: Optional[asyncio.Handle]
+        if deadline is None:
+            self._deadline = None  # type: Optional[float]
+        else:
+            self.update(deadline)
+
+    def __enter__(self) -> "Timeout":
+        warnings.warn(
+            "with timeout() is deprecated, use async with timeout() instead",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        self._do_enter()
+        return self
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> Optional[bool]:
+        self._do_exit(exc_type)
+        return None
+
+    async def __aenter__(self) -> "Timeout":
+        self._do_enter()
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> Optional[bool]:
+        self._do_exit(exc_type)
+        return None
+
+    @property
+    def expired(self) -> bool:
+        """Is timeout expired during execution?"""
+        return self._state == _State.TIMEOUT
+
+    @property
+    def deadline(self) -> Optional[float]:
+        return self._deadline
+
+    def reject(self) -> None:
+        """Reject scheduled timeout if any."""
+        # `cancel` would maybe be a better name, but
+        # task.cancel() raises CancelledError in the asyncio world.
+        if self._state not in (_State.INIT, _State.ENTER):
+            raise RuntimeError(f"invalid state {self._state.value}")
+        self._reject()
+
+    def _reject(self) -> None:
+        if self._timeout_handler is not None:
+            self._timeout_handler.cancel()
+            self._timeout_handler = None
+
+    def shift(self, delay: float) -> None:
+        """Advance the timeout by *delay* seconds.
+
+        The delay can be negative.
+
+        Raise RuntimeError if shift() is called when no deadline is scheduled.
+        """
+        deadline = self._deadline
+        if deadline is None:
+            raise RuntimeError("cannot shift timeout if deadline is not scheduled")
+        self.update(deadline + delay)
+
+    def update(self, deadline: float) -> None:
+        """Set the deadline to an absolute value.
+
+        The deadline argument points to a time in the same clock system
+        as loop.time().
+
+        If the new deadline is in the past, the timeout is raised immediately.
+
+        Please note: this is not POSIX time but a clock with an
+        undefined starting base, e.g. the time since system power-on.
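+
+        Example (an illustrative sketch, assuming a running event loop)::
+
+            async with timeout(5) as cm:
+                # push the deadline to 10 seconds from "now"
+                cm.update(asyncio.get_running_loop().time() + 10)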
+ """ + if self._state == _State.EXIT: + raise RuntimeError("cannot reschedule after exit from context manager") + if self._state == _State.TIMEOUT: + raise RuntimeError("cannot reschedule expired timeout") + if self._timeout_handler is not None: + self._timeout_handler.cancel() + self._deadline = deadline + if self._state != _State.INIT: + self._reschedule() + + def _reschedule(self) -> None: + assert self._state == _State.ENTER + deadline = self._deadline + if deadline is None: + return + + now = self._loop.time() + if self._timeout_handler is not None: + self._timeout_handler.cancel() + + task = _current_task(self._loop) + if deadline <= now: + self._timeout_handler = self._loop.call_soon(self._on_timeout, task) + else: + self._timeout_handler = self._loop.call_at(deadline, self._on_timeout, task) + + def _do_enter(self) -> None: + if self._state != _State.INIT: + raise RuntimeError(f"invalid state {self._state.value}") + self._state = _State.ENTER + self._reschedule() + + def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None: + if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT: + self._timeout_handler = None + raise asyncio.TimeoutError + # timeout has not expired + self._state = _State.EXIT + self._reject() + return None + + def _on_timeout(self, task: "asyncio.Task[None]") -> None: + task.cancel() + self._state = _State.TIMEOUT + # drop the reference early + self._timeout_handler = None + + +if sys.version_info >= (3, 7): + + def _current_task(loop: asyncio.AbstractEventLoop) -> "Optional[asyncio.Task[Any]]": + return asyncio.current_task(loop=loop) + +else: + + def _current_task(loop: asyncio.AbstractEventLoop) -> "Optional[asyncio.Task[Any]]": + return asyncio.Task.current_task(loop=loop) + + +if sys.version_info >= (3, 7): + + def _get_running_loop() -> asyncio.AbstractEventLoop: + return asyncio.get_running_loop() + +else: + + def _get_running_loop() -> asyncio.AbstractEventLoop: + loop = asyncio.get_event_loop() + if not loop.is_running(): + raise RuntimeError("no running event loop") + return loop diff --git a/venv/lib/python3.10/site-packages/async_timeout/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/async_timeout/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..a652a4c Binary files /dev/null and b/venv/lib/python3.10/site-packages/async_timeout/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/async_timeout/py.typed b/venv/lib/python3.10/site-packages/async_timeout/py.typed new file mode 100644 index 0000000..3b94f91 --- /dev/null +++ b/venv/lib/python3.10/site-packages/async_timeout/py.typed @@ -0,0 +1 @@ +Placeholder diff --git a/venv/lib/python3.10/site-packages/attr/__init__.py b/venv/lib/python3.10/site-packages/attr/__init__.py new file mode 100644 index 0000000..0424378 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/__init__.py @@ -0,0 +1,93 @@ +# SPDX-License-Identifier: MIT + +import sys +import warnings + +from functools import partial + +from . 
import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._next_gen import define, field, frozen, mutable +from ._version_info import VersionInfo + + +if sys.version_info < (3, 7): # pragma: no cover + warnings.warn( + "Running attrs on Python 3.6 is deprecated & we intend to drop " + "support soon. If that's a problem for you, please let us know why & " + "we MAY re-evaluate: ", + DeprecationWarning, + ) + +__version__ = "22.2.0" +__version_info__ = VersionInfo._from_version_string(__version__) + +__title__ = "attrs" +__description__ = "Classes Without Boilerplate" +__url__ = "https://www.attrs.org/" +__uri__ = __url__ +__doc__ = __description__ + " <" + __uri__ + ">" + +__author__ = "Hynek Schlawack" +__email__ = "hs@ox.cx" + +__license__ = "MIT" +__copyright__ = "Copyright (c) 2015 Hynek Schlawack" + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + + +class AttrsInstance: + pass + + +__all__ = [ + "Attribute", + "AttrsInstance", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "field", + "fields", + "fields_dict", + "filters", + "frozen", + "get_run_validators", + "has", + "ib", + "make_class", + "mutable", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] diff --git a/venv/lib/python3.10/site-packages/attr/__init__.pyi b/venv/lib/python3.10/site-packages/attr/__init__.pyi new file mode 100644 index 0000000..42a2ee2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/__init__.pyi @@ -0,0 +1,509 @@ +import enum +import sys + +from typing import ( + Any, + Callable, + Dict, + Generic, + List, + Mapping, + Optional, + Protocol, + Sequence, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . 
import validators as validators +from ._cmp import cmp_using as cmp_using +from ._typing_compat import AttrsInstance_ +from ._version_info import VersionInfo + +if sys.version_info >= (3, 10): + from typing import TypeGuard +else: + from typing_extensions import TypeGuard + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = Union[bool, Callable[[Any], Any]] +_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any] +_ConverterType = Callable[[Any], Any] +_FilterType = Callable[["Attribute[_T]", _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +_FieldTransformer = Callable[ + [type, List["Attribute[Any]"]], List["Attribute[Any]"] +] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# We subclass this here to keep the protocol's qualified name clean. +class AttrsInstance(AttrsInstance_, Protocol): + pass + +# _make -- + +class _Nothing(enum.Enum): + NOTHING = enum.auto() + +NOTHING = _Nothing.NOTHING + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... + @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +# Static type inference support via __dataclass_transform__ implemented as per: +# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md +# This annotation must be applied to all overloads of "define" and "attrs" +# +# NOTE: This is a typing construct and does not exist at runtime. Extensions +# wrapping attrs decorators should declare a separate __dataclass_transform__ +# signature in the extension module using the specification linked above to +# provide pyright support. +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[_T], _T]: ... + +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + on_setattr: _OnSetAttrType + alias: Optional[str] + + def evolve(self, **changes: Any) -> "Attribute[Any]": ... 
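+
+# ILLUSTRATION (a sketch, not upstream stub text): the `attrib` overloads
+# below drive type inference, e.g.
+#     x = attrib(8)     # explicit default -> x inferred as int
+#     y = attrib()      # no default, no type -> Any
+# At runtime both are ordinary attrs fields; the `-> _T` return types are
+# deliberate lies for the benefit of static checkers (see the NOTE below).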
+ +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... 
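+
+# ILLUSTRATION (a sketch, not upstream stub text): `field` and `define`,
+# whose overloads follow, are the modern spellings of `attrib`/`attrs`:
+#
+#     import attrs
+#
+#     @attrs.define
+#     class C:
+#         x: int
+#         y: list = attrs.field(factory=list)
+#
+# The __dataclass_transform__ declaration above is what lets type checkers
+# treat such classes like dataclasses.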
+@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., + unsafe_hash: Optional[bool] = ..., +) -> _C: ... 
+@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., + unsafe_hash: Optional[bool] = ..., +) -> Callable[[_C], _C]: ... +@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + unsafe_hash: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + unsafe_hash: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define +frozen = define # they differ only in their defaults + +def fields(cls: Type[AttrsInstance]) -> Any: ... +def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ... +def validate(inst: AttrsInstance) -> None: ... +def resolve_types( + cls: _C, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., + attribs: Optional[List[Attribute[Any]]] = ..., +) -> _C: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] 
= ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + collect_by_mro: bool = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: Optional[bool] = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> TypeGuard[Type[AttrsInstance]]: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... + +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..e5347bf Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/_cmp.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/_cmp.cpython-310.pyc new file mode 100644 index 0000000..6677a7e Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/_cmp.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/_compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/_compat.cpython-310.pyc new file mode 100644 index 0000000..cc6e0c9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/_compat.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/_config.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/_config.cpython-310.pyc new file mode 100644 index 0000000..8925f19 Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/_config.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/_funcs.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/_funcs.cpython-310.pyc new file mode 100644 index 0000000..4db8d57 Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/_funcs.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/attr/__pycache__/_make.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/_make.cpython-310.pyc new file mode 100644 index 0000000..c3c6e8e Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/_make.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/_next_gen.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/_next_gen.cpython-310.pyc new file mode 100644 index 0000000..81dd861 Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/_next_gen.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/_version_info.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/_version_info.cpython-310.pyc new file mode 100644 index 0000000..91eee5e Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/_version_info.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/converters.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/converters.cpython-310.pyc new file mode 100644 index 0000000..ce69674 Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/converters.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000..64edb09 Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/filters.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/filters.cpython-310.pyc new file mode 100644 index 0000000..046f69c Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/filters.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/setters.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/setters.cpython-310.pyc new file mode 100644 index 0000000..603a2fd Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/setters.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/__pycache__/validators.cpython-310.pyc b/venv/lib/python3.10/site-packages/attr/__pycache__/validators.cpython-310.pyc new file mode 100644 index 0000000..a617fec Binary files /dev/null and b/venv/lib/python3.10/site-packages/attr/__pycache__/validators.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attr/_cmp.py b/venv/lib/python3.10/site-packages/attr/_cmp.py new file mode 100644 index 0000000..ad1e18c --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/_cmp.py @@ -0,0 +1,155 @@ +# SPDX-License-Identifier: MIT + + +import functools +import types + +from ._make import _make_ne + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and + ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if + at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + :param Optional[callable] eq: `callable` used to evaluate equality + of two objects. 
+    :param Optional[callable] lt: `callable` used to evaluate whether
+        one object is less than another object.
+    :param Optional[callable] le: `callable` used to evaluate whether
+        one object is less than or equal to another object.
+    :param Optional[callable] gt: `callable` used to evaluate whether
+        one object is greater than another object.
+    :param Optional[callable] ge: `callable` used to evaluate whether
+        one object is greater than or equal to another object.
+
+    :param bool require_same_type: When `True`, equality and ordering methods
+        will return `NotImplemented` if objects are not of the same type.
+
+    :param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
+
+    See `comparison` for more details.
+
+    .. versionadded:: 21.1.0
+    """
+
+    body = {
+        "__slots__": ["value"],
+        "__init__": _make_init(),
+        "_requirements": [],
+        "_is_comparable_to": _is_comparable_to,
+    }
+
+    # Add operations.
+    num_order_functions = 0
+    has_eq_function = False
+
+    if eq is not None:
+        has_eq_function = True
+        body["__eq__"] = _make_operator("eq", eq)
+        body["__ne__"] = _make_ne()
+
+    if lt is not None:
+        num_order_functions += 1
+        body["__lt__"] = _make_operator("lt", lt)
+
+    if le is not None:
+        num_order_functions += 1
+        body["__le__"] = _make_operator("le", le)
+
+    if gt is not None:
+        num_order_functions += 1
+        body["__gt__"] = _make_operator("gt", gt)
+
+    if ge is not None:
+        num_order_functions += 1
+        body["__ge__"] = _make_operator("ge", ge)
+
+    type_ = types.new_class(
+        class_name, (object,), {}, lambda ns: ns.update(body)
+    )
+
+    # Add same type requirement.
+    if require_same_type:
+        type_._requirements.append(_check_same_type)
+
+    # Add total ordering if at least one operation was defined.
+    if 0 < num_order_functions < 4:
+        if not has_eq_function:
+            # functools.total_ordering requires __eq__ to be defined,
+            # so raise an early error here to keep a nice stack trace.
+            raise ValueError(
+                "eq must be defined in order to complete ordering from "
+                "lt, le, gt, ge."
+            )
+        type_ = functools.total_ordering(type_)
+
+    return type_
+
+
+def _make_init():
+    """
+    Create __init__ method.
+    """
+
+    def __init__(self, value):
+        """
+        Initialize object with *value*.
+        """
+        self.value = value
+
+    return __init__
+
+
+def _make_operator(name, func):
+    """
+    Create operator method.
+    """
+
+    def method(self, other):
+        if not self._is_comparable_to(other):
+            return NotImplemented
+
+        result = func(self.value, other.value)
+        if result is NotImplemented:
+            return NotImplemented
+
+        return result
+
+    method.__name__ = f"__{name}__"
+    method.__doc__ = (
+        f"Return a {_operation_names[name]} b.  Computed by attrs."
+    )
+
+    return method
+
+
+def _is_comparable_to(self, other):
+    """
+    Check whether `other` is comparable to `self`.
+    """
+    for func in self._requirements:
+        if not func(self, other):
+            return False
+    return True
+
+
+def _check_same_type(self, other):
+    """
+    Return True if *self* and *other* are of the same type, False otherwise.
+ """ + return other.value.__class__ is self.value.__class__ diff --git a/venv/lib/python3.10/site-packages/attr/_cmp.pyi b/venv/lib/python3.10/site-packages/attr/_cmp.pyi new file mode 100644 index 0000000..f3dcdc1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/_cmp.pyi @@ -0,0 +1,13 @@ +from typing import Any, Callable, Optional, Type + +_CompareWithType = Callable[[Any, Any], bool] + +def cmp_using( + eq: Optional[_CompareWithType] = ..., + lt: Optional[_CompareWithType] = ..., + le: Optional[_CompareWithType] = ..., + gt: Optional[_CompareWithType] = ..., + ge: Optional[_CompareWithType] = ..., + require_same_type: bool = ..., + class_name: str = ..., +) -> Type: ... diff --git a/venv/lib/python3.10/site-packages/attr/_compat.py b/venv/lib/python3.10/site-packages/attr/_compat.py new file mode 100644 index 0000000..35a85a3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/_compat.py @@ -0,0 +1,176 @@ +# SPDX-License-Identifier: MIT + + +import inspect +import platform +import sys +import threading +import types +import warnings + +from collections.abc import Mapping, Sequence # noqa + + +PYPY = platform.python_implementation() == "PyPy" +PY310 = sys.version_info[:2] >= (3, 10) +PY_3_12_PLUS = sys.version_info[:2] >= (3, 12) + + +def just_warn(*args, **kw): + warnings.warn( + "Running interpreter doesn't sufficiently support code object " + "introspection. Some features like bare super() or accessing " + "__class__ will not work with slotted classes.", + RuntimeWarning, + stacklevel=2, + ) + + +class _AnnotationExtractor: + """ + Extract type annotations from a callable, returning None whenever there + is none. + """ + + __slots__ = ["sig"] + + def __init__(self, callable): + try: + self.sig = inspect.signature(callable) + except (ValueError, TypeError): # inspect failed + self.sig = None + + def get_first_param_type(self): + """ + Return the type annotation of the first argument if it's not empty. + """ + if not self.sig: + return None + + params = list(self.sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + return params[0].annotation + + return None + + def get_return_type(self): + """ + Return the return type if it's not empty. + """ + if ( + self.sig + and self.sig.return_annotation is not inspect.Signature.empty + ): + return self.sig.return_annotation + + return None + + +def make_set_closure_cell(): + """Return a function of two arguments (cell, value) which sets + the value stored in the closure cell `cell` to `value`. + """ + # pypy makes this easy. (It also supports the logic below, but + # why not do the easy/fast thing?) + if PYPY: + + def set_closure_cell(cell, value): + cell.__setstate__((value,)) + + return set_closure_cell + + # Otherwise gotta do it the hard way. + + # Create a function that will set its first cellvar to `value`. + def set_first_cellvar_to(value): + x = value + return + + # This function will be eliminated as dead code, but + # not before its reference to `x` forces `x` to be + # represented as a closure cell rather than a local. + def force_x_to_be_a_cell(): # pragma: no cover + return x + + try: + # Extract the code object and make sure our assumptions about + # the closure behavior are correct. + co = set_first_cellvar_to.__code__ + if co.co_cellvars != ("x",) or co.co_freevars != (): + raise AssertionError # pragma: no cover + + # Convert this code object to a code object that sets the + # function's first _freevar_ (not cellvar) to the argument. 
+ if sys.version_info >= (3, 8): + + def set_closure_cell(cell, value): + cell.cell_contents = value + + else: + args = [co.co_argcount] + args.append(co.co_kwonlyargcount) + args.extend( + [ + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + # These two arguments are reversed: + co.co_cellvars, + co.co_freevars, + ] + ) + set_first_freevar_code = types.CodeType(*args) + + def set_closure_cell(cell, value): + # Create a function using the set_first_freevar_code, + # whose first closure cell is `cell`. Calling it will + # change the value of that cell. + setter = types.FunctionType( + set_first_freevar_code, {}, "setter", (), (cell,) + ) + # And call it to set the cell. + setter(value) + + # Make sure it works on this interpreter: + def make_func_with_cell(): + x = None + + def func(): + return x # pragma: no cover + + return func + + cell = make_func_with_cell().__closure__[0] + set_closure_cell(cell, 100) + if cell.cell_contents != 100: + raise AssertionError # pragma: no cover + + except Exception: + return just_warn + else: + return set_closure_cell + + +set_closure_cell = make_set_closure_cell() + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. +repr_context = threading.local() diff --git a/venv/lib/python3.10/site-packages/attr/_config.py b/venv/lib/python3.10/site-packages/attr/_config.py new file mode 100644 index 0000000..96d4200 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/_config.py @@ -0,0 +1,31 @@ +# SPDX-License-Identifier: MIT + + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. + """ + if not isinstance(run, bool): + raise TypeError("'run' must be bool.") + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. 
+ """ + return _run_validators diff --git a/venv/lib/python3.10/site-packages/attr/_funcs.py b/venv/lib/python3.10/site-packages/attr/_funcs.py new file mode 100644 index 0000000..1f573c1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/_funcs.py @@ -0,0 +1,418 @@ +# SPDX-License-Identifier: MIT + + +import copy + +from ._make import NOTHING, _obj_setattr, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the ``attrs`` attribute values of *inst* as a dict. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable dict_factory: A callable to produce dictionaries from. For + example, to produce ordered dictionaries instead of normal Python + dictionaries, pass in ``collections.OrderedDict``. + :param bool retain_collection_types: Do not convert to ``list`` when + encountering an attribute whose type is ``tuple`` or ``set``. Only + meaningful if ``recurse`` is ``True``. + :param Optional[callable] value_serializer: A hook that is called for every + attribute or dict key/value. It receives the current instance, field + and value and must return the (updated) value. The hook is run *after* + the optional *filter* has been applied. + + :rtype: return type of *dict_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 If a dict has a collection for a key, it is + serialized as a tuple. 
+ """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain_collection_types is True else list + rv[a.name] = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + ) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in v.items() + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything( + val, + is_key, + filter, + dict_factory, + retain_collection_types, + value_serializer, +): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. + rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in val.items() + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a tuple. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable tuple_factory: A callable to produce tuples from. For + example, to produce lists instead of tuples. 
+    :param bool retain_collection_types: Do not convert to ``list``
+        or ``dict`` when encountering an attribute whose type is
+        ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
+        ``True``.
+
+    :rtype: return type of *tuple_factory*
+
+    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+        class.
+
+    .. versionadded:: 16.2.0
+    """
+    attrs = fields(inst.__class__)
+    rv = []
+    retain = retain_collection_types  # Very long. :/
+    for a in attrs:
+        v = getattr(inst, a.name)
+        if filter is not None and not filter(a, v):
+            continue
+        if recurse is True:
+            if has(v.__class__):
+                rv.append(
+                    astuple(
+                        v,
+                        recurse=True,
+                        filter=filter,
+                        tuple_factory=tuple_factory,
+                        retain_collection_types=retain,
+                    )
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
+                cf = v.__class__ if retain is True else list
+                rv.append(
+                    cf(
+                        [
+                            astuple(
+                                j,
+                                recurse=True,
+                                filter=filter,
+                                tuple_factory=tuple_factory,
+                                retain_collection_types=retain,
+                            )
+                            if has(j.__class__)
+                            else j
+                            for j in v
+                        ]
+                    )
+                )
+            elif isinstance(v, dict):
+                df = v.__class__ if retain is True else dict
+                rv.append(
+                    df(
+                        (
+                            astuple(
+                                kk,
+                                tuple_factory=tuple_factory,
+                                retain_collection_types=retain,
+                            )
+                            if has(kk.__class__)
+                            else kk,
+                            astuple(
+                                vv,
+                                tuple_factory=tuple_factory,
+                                retain_collection_types=retain,
+                            )
+                            if has(vv.__class__)
+                            else vv,
+                        )
+                        for kk, vv in v.items()
+                    )
+                )
+            else:
+                rv.append(v)
+        else:
+            rv.append(v)
+
+    return rv if tuple_factory is list else tuple_factory(rv)
+
+
+def has(cls):
+    """
+    Check whether *cls* is a class with ``attrs`` attributes.
+
+    :param type cls: Class to introspect.
+    :raise TypeError: If *cls* is not a class.
+
+    :rtype: bool
+    """
+    return getattr(cls, "__attrs_attrs__", None) is not None
+
+
+def assoc(inst, **changes):
+    """
+    Copy *inst* and apply *changes*.
+
+    :param inst: Instance of a class with ``attrs`` attributes.
+    :param changes: Keyword changes in the new copy.
+
+    :return: A copy of inst with *changes* incorporated.
+
+    :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
+        be found on *cls*.
+    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+        class.
+
+    ..  deprecated:: 17.1.0
+        Use `attrs.evolve` instead if you can.
+        This function will not be removed due to the slightly different approach
+        compared to `attrs.evolve`.
+    """
+    import warnings
+
+    warnings.warn(
+        "assoc is deprecated and will be removed after 2018/01.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    new = copy.copy(inst)
+    attrs = fields(inst.__class__)
+    for k, v in changes.items():
+        a = getattr(attrs, k, NOTHING)
+        if a is NOTHING:
+            raise AttrsAttributeNotFoundError(
+                f"{k} is not an attrs attribute on {new.__class__}."
+            )
+        _obj_setattr(new, k, v)
+    return new
+
+
+def evolve(inst, **changes):
+    """
+    Create a new instance, based on *inst* with *changes* applied.
+
+    :param inst: Instance of a class with ``attrs`` attributes.
+    :param changes: Keyword changes in the new copy.
+
+    :return: A copy of inst with *changes* incorporated.
+
+    :raise TypeError: If *attr_name* couldn't be found in the class
+        ``__init__``.
+    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+        class.
+
+    ..  versionadded:: 17.1.0
+    """
+    cls = inst.__class__
+    attrs = fields(cls)
+    for a in attrs:
+        if not a.init:
+            continue
+        attr_name = a.name  # To deal with private attributes.
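+        # The __init__ parameter name can differ from the attribute name (a
+        # private attribute `_x` is exposed as init argument `x`); `a.alias`
+        # holds that init-time name, so changes are looked up under the alias
+        # and default to the instance's current value.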
+ init_name = a.alias + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +def resolve_types(cls, globalns=None, localns=None, attribs=None): + """ + Resolve any strings and forward annotations in type annotations. + + This is only required if you need concrete types in `Attribute`'s *type* + field. In other words, you don't need to resolve your types if you only + use them for static type checking. + + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, e.g. if the name only exists + inside a method, you may pass *globalns* or *localns* to specify other + dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + :param type cls: Class to resolve. + :param Optional[dict] globalns: Dictionary containing global variables. + :param Optional[dict] localns: Dictionary containing local variables. + :param Optional[list] attribs: List of attribs for the given class. + This is necessary when calling from inside a ``field_transformer`` + since *cls* is not an ``attrs`` class yet. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class and you didn't pass any attribs. + :raise NameError: If types cannot be resolved because of missing variables. + + :returns: *cls* so you can use this function also as a class decorator. + Please note that you have to apply it **after** `attrs.define`. That + means the decorator has to come in the line **before** `attrs.define`. + + .. versionadded:: 20.1.0 + .. versionadded:: 21.1.0 *attribs* + + """ + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + if getattr(cls, "__attrs_types_resolved__", None) != cls: + import typing + + hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) + for field in fields(cls) if attribs is None else attribs: + if field.name in hints: + # Since fields have been frozen we must work around it. + _obj_setattr(field, "type", hints[field.name]) + # We store the class we resolved so that subclasses know they haven't + # been resolved. + cls.__attrs_types_resolved__ = cls + + # Return the class so you can use it as a decorator too. + return cls diff --git a/venv/lib/python3.10/site-packages/attr/_make.py b/venv/lib/python3.10/site-packages/attr/_make.py new file mode 100644 index 0000000..9ee2200 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/_make.py @@ -0,0 +1,2965 @@ +# SPDX-License-Identifier: MIT + +import copy +import enum +import linecache +import sys +import types +import typing + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import PY310, PYPY, _AnnotationExtractor, set_closure_cell +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + UnannotatedAttributeError, +) + + +# This is used at least twice, so cache it here. 
+_obj_setattr = object.__setattr__ +_init_converter_pat = "__attr_converter_%s" +_init_factory_pat = "__attr_factory_%s" +_classvar_prefixes = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" + +_empty_metadata_singleton = types.MappingProxyType({}) + +# Unique object for unequivocal getattr() defaults. +_sentinel = object() + +_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(enum.Enum): + """ + Sentinel to indicate the lack of a value when ``None`` is ambiguous. + + If extending attrs, you can use ``typing.Literal[NOTHING]`` to show + that a value may be ``NOTHING``. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant. + """ + + NOTHING = enum.auto() + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + +NOTHING = _Nothing.NOTHING +""" +Sentinel to indicate the lack of a value when ``None`` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since ``None`` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + def __reduce__(self, _none_constructor=type(None), _args=()): + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Create a new attribute on a class. + + .. warning:: + + Does *not* do anything unless the class is also decorated with + `attr.s`! + + :param default: A value that is used if an ``attrs``-generated ``__init__`` + is used and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will be + used to construct a new value (useful for mutable data types like lists + or dicts). + + If a default is not set (or set manually to `attrs.NOTHING`), a value + *must* be supplied when instantiating; otherwise a `TypeError` + will be raised. + + The default can also be set using decorator notation as shown below. + + :type default: Any value + + :param callable factory: Syntactic sugar for + ``default=attr.Factory(factory)``. + + :param validator: `callable` that is called by ``attrs``-generated + ``__init__`` methods after the instance has been initialized. They + receive the initialized instance, the :func:`~attrs.Attribute`, and the + passed value. + + The return value is *not* inspected so the validator has to throw an + exception itself. + + If a `list` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `get_run_validators`. + + The validator can also be set using decorator notation as shown below. + + :type validator: `callable` or a `list` of `callable`\\ s. + + :param repr: Include this attribute in the generated ``__repr__`` + method. 
If ``True``, include the attribute; if ``False``, omit it. By
+        default, the built-in ``repr()`` function is used. To override how the
+        attribute value is formatted, pass a ``callable`` that takes a single
+        value and returns a string. Note that the resulting string is used
+        as-is, i.e. it will be used directly *instead* of calling ``repr()``
+        (the default).
+    :type repr: a `bool` or a `callable` to use a custom function.
+
+    :param eq: If ``True`` (default), include this attribute in the
+        generated ``__eq__`` and ``__ne__`` methods that check two instances
+        for equality. To override how the attribute value is compared,
+        pass a ``callable`` that takes a single value and returns the value
+        to be compared.
+    :type eq: a `bool` or a `callable`.
+
+    :param order: If ``True`` (default), include this attribute in the
+        generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods.
+        To override how the attribute value is ordered,
+        pass a ``callable`` that takes a single value and returns the value
+        to be ordered.
+    :type order: a `bool` or a `callable`.
+
+    :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the
+        same value. Must not be mixed with *eq* or *order*.
+    :type cmp: a `bool` or a `callable`.
+
+    :param Optional[bool] hash: Include this attribute in the generated
+        ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This
+        is the correct behavior according to the Python spec. Setting this
+        value to anything other than ``None`` is *discouraged*.
+    :param bool init: Include this attribute in the generated ``__init__``
+        method. It is possible to set this to ``False`` and set a default
+        value. In that case this attribute is unconditionally initialized
+        with the specified default value or factory.
+    :param callable converter: `callable` that is called by
+        ``attrs``-generated ``__init__`` methods to convert the attribute's
+        value to the desired format. It is given the passed-in value, and the
+        returned value will be used as the new value of the attribute. The
+        value is converted before being passed to the validator, if any.
+    :param metadata: An arbitrary mapping, to be used by third-party
+        components. See `extending-metadata`.
+
+    :param type: The type of the attribute. Nowadays, the preferred method to
+        specify the type is using a variable annotation (see :pep:`526`).
+        This argument is provided for backward compatibility.
+        Regardless of the approach used, the type will be stored on
+        ``Attribute.type``.
+
+        Please note that ``attrs`` doesn't do anything with this metadata by
+        itself. You can use it as part of your own code or for
+        `static type checking <mypy>`.
+    :param kw_only: Make this attribute keyword-only in the generated
+        ``__init__`` (if ``init`` is ``False``, this parameter is ignored).
+    :param on_setattr: Allows overwriting the *on_setattr* setting from
+        `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used.
+        Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
+        attribute -- regardless of the setting in `attr.s`.
+    :type on_setattr: `callable`, or a list of callables, or `None`, or
+        `attrs.setters.NO_OP`
+    :param Optional[str] alias: Override this attribute's parameter name in the
+        generated ``__init__`` method. If left `None`, defaults to ``name``
+        stripped of leading underscores. See `private-attributes`.
+
+    .. versionadded:: 15.2.0 *convert*
+    .. versionadded:: 16.3.0 *metadata*
+    .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+    ..
versionchanged:: 17.1.0 + *hash* is ``None`` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated + *convert* to achieve consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 22.2.0 *alias* + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + + if factory is not None: + if default is not NOTHING: + raise ValueError( + "The `default` and `factory` arguments are mutually " + "exclusive." + ) + if not callable(factory): + raise ValueError("The `factory` argument must be a callable.") + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + alias=alias, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + "Exec" the script with the given global (globs) and local (locs) variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs): + """ + Create the method with the script given and return the method object. + """ + locs = {} + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + else: + filename = f"{base_filename[:-1]}-{count}>" + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. 
+ + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = f"{cls_name}Attributes" + attr_class_template = [ + f"class {attr_class_name}(tuple):", + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + f" {attr_name} = _attrs_property(_attrs_itemgetter({i}))" + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + _compile_and_eval("\n".join(attr_class_template), globs) + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. + if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_classvar_prefixes) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + """ + attr = getattr(cls, attrib_name, _sentinel) + if attr is _sentinel: + return False + + for base_cls in cls.__mro__[1:]: + a = getattr(base_cls, attrib_name, None) + if attr is a: + return False + + return True + + +def _get_annotations(cls): + """ + Get annotations for *cls*. + """ + if _has_own_attribute(cls, "__annotations__"): + return cls.__annotations__ + + return {} + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. 
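+    # (Unlike _collect_base_attrs above, this walks the MRO front to back,
+    # keeps attributes already marked as inherited, and never de-duplicates
+    # repeated names -- which is exactly the pre-#428 behavior.)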
+ for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + *collect_by_mro* is True, collect them in the correct MRO order, otherwise + use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = [(name, ca) for name, ca in these.items()] + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if not isinstance(a, _CountingAttr): + if a is NOTHING: + a = attrib() + else: + a = attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + raise ValueError( + "No mandatory attributes allowed after an attribute with a " + f"default value or factory. Attribute in question: {a!r}" + ) + + if had_default is False and a.default is not NOTHING: + had_default = True + + if field_transformer is not None: + attrs = field_transformer(cls, attrs) + + # Resolve default field alias after executing field_transformer. + # This allows field_transformer to differentiate between explicit vs + # default aliases and supply their own defaults. + attrs = [ + a.evolve(alias=_default_init_alias_for(a.name)) if not a.alias else a + for a in attrs + ] + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! 
+ attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) + + +if PYPY: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + +else: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder: + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = {a.name for a in base_attrs} + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _ng_default_on_setattr, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _ng_default_on_setattr + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return f"<_ClassBuilder(cls={self._cls.__name__})>" + + if PY310: + import abc + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. 
+ """ + if self._slots is True: + return self._create_slots_class() + + return self.abc.update_abstractmethods( + self._patch_original_class() + ) + + else: + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + + return self._patch_original_class() + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). + if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _sentinel) is not _sentinel + ): + try: + delattr(cls, name) + except AttributeError: + # This can happen if a base class defines a class + # variable and we want to set an attribute with the + # same name by using only a type annotation. + pass + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = _obj_setattr + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in self._cls_dict.items() + if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = _obj_setattr + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. + existing_slots = dict() + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + # There are slots for attributes from current class + # that are defined in parent classes. 
+        # As their descriptors may be overridden by a child class,
+        # we collect them here and update the class dict
+        reused_slots = {
+            slot: slot_descriptor
+            for slot, slot_descriptor in existing_slots.items()
+            if slot in slot_names
+        }
+        slot_names = [name for name in slot_names if name not in reused_slots]
+        cd.update(reused_slots)
+        if self._cache_hash:
+            slot_names.append(_hash_cache_field)
+        cd["__slots__"] = tuple(slot_names)
+
+        cd["__qualname__"] = self._cls.__qualname__
+
+        # Create new class based on old class and our methods.
+        cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+
+        # The following is a fix for
+        # <https://github.com/python-attrs/attrs/issues/102>.
+        # If a method mentions `__class__` or uses the no-arg super(), the
+        # compiler will bake a reference to the class in the method itself
+        # as `method.__closure__`. Since we replace the class with a
+        # clone, we rewrite these references so it keeps working.
+        for item in cls.__dict__.values():
+            if isinstance(item, (classmethod, staticmethod)):
+                # Class- and staticmethods hide their functions inside.
+                # These might need to be rewritten as well.
+                closure_cells = getattr(item.__func__, "__closure__", None)
+            elif isinstance(item, property):
+                # Workaround for property `super()` shortcut (PY3-only).
+                # There is no universal way for other descriptors.
+                closure_cells = getattr(item.fget, "__closure__", None)
+            else:
+                closure_cells = getattr(item, "__closure__", None)
+
+            if not closure_cells:  # Catch None or the empty list.
+                continue
+            for cell in closure_cells:
+                try:
+                    match = cell.cell_contents is self._cls
+                except ValueError:  # ValueError: Cell is empty
+                    pass
+                else:
+                    if match:
+                        set_closure_cell(cell, cls)
+
+        return cls
+
+    def add_repr(self, ns):
+        self._cls_dict["__repr__"] = self._add_method_dunders(
+            _make_repr(self._attrs, ns, self._cls)
+        )
+        return self
+
+    def add_str(self):
+        repr = self._cls_dict.get("__repr__")
+        if repr is None:
+            raise ValueError(
+                "__str__ can only be generated if a __repr__ exists."
+            )
+
+        def __str__(self):
+            return self.__repr__()
+
+        self._cls_dict["__str__"] = self._add_method_dunders(__str__)
+        return self
+
+    def _make_getstate_setstate(self):
+        """
+        Create custom __setstate__ and __getstate__ methods.
+        """
+        # __weakref__ is not writable.
+        state_attr_names = tuple(
+            an for an in self._attr_names if an != "__weakref__"
+        )
+
+        def slots_getstate(self):
+            """
+            Automatically created by attrs.
+            """
+            return {name: getattr(self, name) for name in state_attr_names}
+
+        hash_caching_enabled = self._cache_hash
+
+        def slots_setstate(self, state):
+            """
+            Automatically created by attrs.
+            """
+            __bound_setattr = _obj_setattr.__get__(self)
+            for name in state_attr_names:
+                if name in state:
+                    __bound_setattr(name, state[name])
+
+            # The hash code cache is not included when the object is
+            # serialized, but it still needs to be initialized to None to
+            # indicate that the first call to __hash__ should be a cache
+            # miss.
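+            # (See _CacheHashWrapper above for why a cached hash value also
+            # deserializes as None rather than as a stale integer.)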
+ if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + raise ValueError( + "Can't combine custom __setattr__ with on_setattr hooks." + ) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _obj_setattr(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + try: + method.__module__ = self._cls.__module__ + except AttributeError: + pass + + try: + method.__qualname__ = ".".join( + (self._cls.__qualname__, method.__name__) + ) + except AttributeError: + pass + + try: + method.__doc__ = ( + "Method generated by attrs for class " + f"{self._cls.__qualname__}." + ) + except AttributeError: + pass + + return method + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. 
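+    # For example, with default_eq=True: eq=None, order=None -> (True, True);
+    # eq=False, order=None -> (False, False); eq=True, order=False ->
+    # (True, False).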
+ if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. + for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, + unsafe_hash=None, +): + r""" + A class decorator that adds :term:`dunder methods` according to the + specified attributes using `attr.ib` or the *these* argument. + + :param these: A dictionary of name to `attr.ib` mappings. This is + useful to avoid the definition of your attributes within the class body + because you can't (e.g. if you want to add ``__repr__`` methods to + Django models) or don't want to. + + If *these* is not ``None``, ``attrs`` will *not* search the class body + for attributes and will *not* remove any attributes from it. + + The order is deduced from the order of the attributes inside *these*. + + :type these: `dict` of `str` to `attr.ib` + + :param str repr_ns: When using nested classes, there's no way in Python 2 + to automatically detect that. Therefore it's possible to set the + namespace explicitly for a more meaningful ``repr`` output. 
+    :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*,
+        *order*, and *hash* arguments explicitly, assume they are set to
+        ``True`` **unless any** of the involved methods for one of the
+        arguments is implemented in the *current* class (i.e. it is *not*
+        inherited from some base class).
+
+        So for example by implementing ``__eq__`` on a class yourself,
+        ``attrs`` will deduce ``eq=False`` and will create *neither*
+        ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible
+        ``__ne__`` by default, so it *should* be enough to only implement
+        ``__eq__`` in most cases).
+
+        .. warning::
+
+           If you prevent ``attrs`` from creating the ordering methods for you
+           (``order=False``, e.g. by implementing ``__le__``), it becomes
+           *your* responsibility to make sure its ordering is sound. The best
+           way is to use the `functools.total_ordering` decorator.
+
+
+        Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*,
+        *cmp*, or *hash* overrides whatever *auto_detect* would determine.
+
+    :param bool repr: Create a ``__repr__`` method with a human-readable
+        representation of ``attrs`` attributes.
+    :param bool str: Create a ``__str__`` method that is identical to
+        ``__repr__``. This is usually not necessary except for
+        `Exception`\ s.
+    :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__``
+        and ``__ne__`` methods that check two instances for equality.
+
+        They compare the instances as if they were tuples of their ``attrs``
+        attributes if and only if the types of both classes are *identical*!
+    :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``,
+        ``__gt__``, and ``__ge__`` methods that behave like *eq* above and
+        allow instances to be ordered. If ``None`` (default), mirror the value
+        of *eq*.
+    :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq*
+        and *order* to the same value. Must not be mixed with *eq* or *order*.
+    :param Optional[bool] unsafe_hash: If ``None`` (default), the ``__hash__``
+        method is generated according to how *eq* and *frozen* are set.
+
+        1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
+        2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
+           None, marking it unhashable (which it is).
+        3. If *eq* is False, ``__hash__`` will be left untouched meaning the
+           ``__hash__`` method of the base class will be used (if base class is
+           ``object``, this means it will fall back to id-based hashing).
+
+        Although not recommended, you can decide for yourself and force
+        ``attrs`` to create one (e.g. if the class is immutable even though you
+        didn't freeze it programmatically) by passing ``True`` or ``False``.
+        Both of these cases are rather special and should be used carefully.
+
+        See our documentation on `hashing`, Python's documentation on
+        `object.__hash__`, and the `GitHub issue that led to the default \
+        behavior <https://github.com/python-attrs/attrs/issues/136>`_ for more
+        details.
+    :param Optional[bool] hash: Alias for *unsafe_hash*. *unsafe_hash* takes
+        precedence.
+    :param bool init: Create an ``__init__`` method that initializes the
+        ``attrs`` attributes. Leading underscores are stripped for the argument
+        name. If a ``__attrs_pre_init__`` method exists on the class, it will
+        be called before the class is initialized. If a ``__attrs_post_init__``
+        method exists on the class, it will be called after the class is fully
+        initialized.
+
+        If ``init`` is ``False``, an ``__attrs_init__`` method will be
+        injected instead. This allows you to define a custom ``__init__``
+        method that can do pre-init work such as ``super().__init__()``,
+        and then call ``__attrs_init__()`` and ``__attrs_post_init__()``.
+    :param bool slots: Create a :term:`slotted class <slotted classes>` that's
+        more memory-efficient. Slotted classes are generally superior to the
+        default dict classes, but have some gotchas you should know about, so
+        we encourage you to read the :term:`glossary entry <slotted classes>`.
+    :param bool frozen: Make instances immutable after initialization. If
+        someone attempts to modify a frozen instance,
+        `attr.exceptions.FrozenInstanceError` is raised.
+
+        .. note::
+
+            1. This is achieved by installing a custom ``__setattr__`` method
+               on your class, so you can't implement your own.
+
+            2. True immutability is impossible in Python.
+
+            3. This *does* have a minor runtime performance `impact
+               <how-frozen>` when initializing new instances. In other words:
+               ``__init__`` is slightly slower with ``frozen=True``.
+
+            4. If a class is frozen, you cannot modify ``self`` in
+               ``__attrs_post_init__`` or a self-written ``__init__``. You can
+               circumvent that limitation by using
+               ``object.__setattr__(self, "attribute_name", value)``.
+
+            5. Subclasses of a frozen class are frozen too.
+
+    :param bool weakref_slot: Make instances weak-referenceable. This has no
+        effect unless ``slots`` is also enabled.
+    :param bool auto_attribs: If ``True``, collect :pep:`526`-annotated
+        attributes from the class body.
+
+        In this case, you **must** annotate every field. If ``attrs``
+        encounters a field that is set to an `attr.ib` but lacks a type
+        annotation, an `attr.exceptions.UnannotatedAttributeError` is
+        raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't
+        want to set a type.
+
+        If you assign a value to those attributes (e.g. ``x: int = 42``), that
+        value becomes the default value like if it were passed using
+        ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also
+        works as expected in most cases (see warning below).
+
+        Attributes annotated as `typing.ClassVar`, and attributes that are
+        neither annotated nor set to an `attr.ib` are **ignored**.
+
+        .. warning::
+           For features that use the attribute name to create decorators (e.g.
+           `validators <examples-validators>`), you still *must* assign
+           `attr.ib` to them. Otherwise Python will either not find the name
+           or try to use the default value to call e.g. ``validator`` on it.
+
+           These errors can be quite confusing and probably the most common bug
+           report on our bug tracker.
+
+    :param bool kw_only: Make all attributes keyword-only
+        in the generated ``__init__`` (if ``init`` is ``False``, this
+        parameter is ignored).
+    :param bool cache_hash: Ensure that the object's hash code is computed
+        only once and stored on the object. If this is set to ``True``,
+        hashing must be either explicitly or implicitly enabled for this
+        class. If the hash code is cached, avoid any reassignments of
+        fields involved in hash code computation or mutations of the objects
+        those fields point to after object creation. If such changes occur,
+        the behavior of the object's hash code is undefined.
+    :param bool auto_exc: If the class subclasses `BaseException`
+        (which implicitly includes any subclass of any exception), the
+        following happens so that it behaves like a well-behaved Python
+        exception class:
+
+        - the values for *eq*, *order*, and *hash* are ignored and the
+          instances compare and hash by the instance's ids (N.B. ``attrs`` will
+          *not* remove existing implementations of ``__hash__`` or the equality
+          methods. It just won't add own ones.),
+        - all attributes that are either passed into ``__init__`` or have a
+          default value are additionally available as a tuple in the ``args``
+          attribute,
+        - the value of *str* is ignored, leaving ``__str__`` to base classes.
+    :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs``
+        collects attributes from base classes. The default behavior is
+        incorrect in certain cases of multiple inheritance. It should be on by
+        default but is kept off for backward-compatibility.
+
+        See issue `#428 <https://github.com/python-attrs/attrs/issues/428>`_
+        for more details.
+
+    :param Optional[bool] getstate_setstate:
+        .. note::
+            This is usually only interesting for slotted classes and you should
+            probably just set *auto_detect* to `True`.
+
+        If `True`, ``__getstate__`` and
+        ``__setstate__`` are generated and attached to the class. This is
+        necessary for slotted classes to be pickleable. If left `None`, it's
+        `True` by default for slotted classes and ``False`` for dict classes.
+
+        If *auto_detect* is `True`, and *getstate_setstate* is left `None`,
+        and **either** ``__getstate__`` or ``__setstate__`` is detected
+        directly on the class (i.e. not inherited), it is set to `False` (this
+        is usually what you want).
+
+    :param on_setattr: A callable that is run whenever the user attempts to set
+        an attribute (either by assignment like ``i.x = 42`` or by using
+        `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments
+        as validators: the instance, the attribute that is being modified, and
+        the new value.
+
+        If no exception is raised, the attribute is set to the return value of
+        the callable.
+
+        If a list of callables is passed, they're automatically wrapped in an
+        `attrs.setters.pipe`.
+    :type on_setattr: `callable`, or a list of callables, or `None`, or
+        `attrs.setters.NO_OP`
+
+    :param Optional[callable] field_transformer:
+        A function that is called with the original class object and all
+        fields right before ``attrs`` finalizes the class. You can use
+        this, e.g., to automatically add converters or validators to
+        fields based on their types. See `transform-fields` for more details.
+
+    :param bool match_args:
+        If `True` (default), set ``__match_args__`` on the class to support
+        :pep:`634` (Structural Pattern Matching). It is a tuple of all
+        non-keyword-only ``__init__`` parameter names on Python 3.10 and later.
+        Ignored on older Python versions.
+
+    .. versionadded:: 16.0.0 *slots*
+    .. versionadded:: 16.1.0 *frozen*
+    .. versionadded:: 16.3.0 *str*
+    .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
+    .. versionchanged:: 17.1.0
+        *hash* supports ``None`` as value which is also the default now.
+    .. versionadded:: 17.3.0 *auto_attribs*
+    .. versionchanged:: 18.1.0
+        If *these* is passed, no attributes are deleted from the class body.
+    .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
+    .. versionadded:: 18.2.0 *weakref_slot*
+    .. deprecated:: 18.2.0
+        ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
+        `DeprecationWarning` if the classes compared are subclasses of
+        each other. ``__eq__`` and ``__ne__`` never tried to compare
+        subclasses to each other.
+    .. versionchanged:: 19.2.0
+        ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
+        subclasses comparable anymore.
+    .. versionadded:: 18.2.0 *kw_only*
+    .. versionadded:: 18.2.0 *cache_hash*
+    .. versionadded:: 19.1.0 *auto_exc*
+    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+    .. versionadded:: 19.2.0 *eq* and *order*
+    ..
versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + .. versionadded:: 22.2.0 + *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). + """ + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + + # unsafe_hash takes precedence due to PEP 681. + if unsafe_hash is not None: + hash = unsafe_hash + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + raise ValueError("Can't freeze a class with a custom __setattr__.") + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + nonlocal hash + if ( + hash is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + elif hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " init must be True." + ) + + if ( + PY310 + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. 
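+    #
+    #     @attrs            # bare decorator: maybe_cls is the class
+    #     class C: ...
+    #
+    #     @attrs(slots=True)  # called decorator: maybe_cls is None,
+    #     class D: ...        # so we return `wrap` to be applied to D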
+    if maybe_cls is None:
+        return wrap
+    else:
+        return wrap(maybe_cls)
+
+
+_attrs = attrs
+"""
+Internal alias so we can use it in functions that take an argument called
+*attrs*.
+"""
+
+
+def _has_frozen_base_class(cls):
+    """
+    Check whether *cls* has a frozen ancestor by looking at its
+    __setattr__.
+    """
+    return cls.__setattr__ is _frozen_setattrs
+
+
+def _generate_unique_filename(cls, func_name):
+    """
+    Create a "filename" suitable for a function being generated.
+    """
+    return (
+        f"<attrs generated {func_name} {cls.__module__}."
+        f"{getattr(cls, '__qualname__', cls.__name__)}>"
+    )
+
+
+def _make_hash(cls, attrs, frozen, cache_hash):
+    attrs = tuple(
+        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
+    )
+
+    tab = "        "
+
+    unique_filename = _generate_unique_filename(cls, "hash")
+    type_hash = hash(unique_filename)
+    # If eq is custom generated, we need to include the functions in globs
+    globs = {}
+
+    hash_def = "def __hash__(self"
+    hash_func = "hash(("
+    closing_braces = "))"
+    if not cache_hash:
+        hash_def += "):"
+    else:
+        hash_def += ", *"
+
+        hash_def += (
+            ", _cache_wrapper="
+            + "__import__('attr._make')._make._CacheHashWrapper):"
+        )
+        hash_func = "_cache_wrapper(" + hash_func
+        closing_braces += ")"
+
+    method_lines = [hash_def]
+
+    def append_hash_computation_lines(prefix, indent):
+        """
+        Generate the code for actually computing the hash code.
+        Below this will either be returned directly or used to compute
+        a value which is then cached, depending on the value of cache_hash
+        """
+
+        method_lines.extend(
+            [
+                indent + prefix + hash_func,
+                indent + f"    {type_hash},",
+            ]
+        )
+
+        for a in attrs:
+            if a.eq_key:
+                cmp_name = f"_{a.name}_key"
+                globs[cmp_name] = a.eq_key
+                method_lines.append(
+                    indent + f"    {cmp_name}(self.{a.name}),"
+                )
+            else:
+                method_lines.append(indent + f"    self.{a.name},")
+
+        method_lines.append(indent + "    " + closing_braces)
+
+    if cache_hash:
+        method_lines.append(tab + f"if self.{_hash_cache_field} is None:")
+        if frozen:
+            append_hash_computation_lines(
+                f"object.__setattr__(self, '{_hash_cache_field}', ", tab * 2
+            )
+            method_lines.append(tab * 2 + ")")  # close __setattr__
+        else:
+            append_hash_computation_lines(
+                f"self.{_hash_cache_field} = ", tab * 2
+            )
+        method_lines.append(tab + f"return self.{_hash_cache_field}")
+    else:
+        append_hash_computation_lines("return ", tab)
+
+    script = "\n".join(method_lines)
+    return _make_method("__hash__", script, unique_filename, globs)
+
+
+def _add_hash(cls, attrs):
+    """
+    Add a hash method to *cls*.
+    """
+    cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
+    return cls
+
+
+def _make_ne():
+    """
+    Create __ne__ method.
+    """
+
+    def __ne__(self, other):
+        """
+        Check equality and either forward a NotImplemented or
+        return the result negated.
+        """
+        result = self.__eq__(other)
+        if result is NotImplemented:
+            return NotImplemented
+
+        return not result
+
+    return __ne__
+
+
+def _make_eq(cls, attrs):
+    """
+    Create __eq__ method for *cls* with *attrs*.
+    """
+    attrs = [a for a in attrs if a.eq]
+
+    unique_filename = _generate_unique_filename(cls, "eq")
+    lines = [
+        "def __eq__(self, other):",
+        "    if other.__class__ is not self.__class__:",
+        "        return NotImplemented",
+    ]
+
+    # We can't just do a big self.x = other.x and... clause due to
+    # irregularities like nan == nan is false but (nan,) == (nan,) is true.
+    globs = {}
+    if attrs:
+        lines.append("    return (")
+        others = ["    ) == ("]
+        for a in attrs:
+            if a.eq_key:
+                cmp_name = f"_{a.name}_key"
+                # Add the key function to the global namespace
+                # of the evaluated function.
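+                # (The generated __eq__ then compares _<name>_key(self.<name>)
+                # against _<name>_key(other.<name>).)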
+ globs[cmp_name] = a.eq_key + lines.append(f" {cmp_name}(self.{a.name}),") + others.append(f" {cmp_name}(other.{a.name}),") + else: + lines.append(f" self.{a.name},") + others.append(f" other.{a.name},") + + lines += others + [" )"] + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." + name if i else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + f" return f'{cls_name_fragment}({repr_fragment})'", + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. 
+ """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns, cls) + return cls + + +def fields(cls): + """ + Return the tuple of ``attrs`` attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. + """ + if not isinstance(cls, type): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError(f"{cls!r} is not an attrs-decorated class.") + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of ``attrs`` attributes for a class, whose + keys are the attribute names. + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: dict + + .. versionadded:: 18.1.0 + """ + if not isinstance(cls, type): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError(f"{cls!r} is not an attrs-decorated class.") + return {a.name: a for a in attrs} + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + :param inst: Instance of a class with ``attrs`` attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_cls(cls): + return "__slots__" in cls.__dict__ + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) + + +def _make_init( + cls, + attrs, + pre_init, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. + globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. 
+ globs["_cached_setattr_get"] = _obj_setattr.__get__ + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*. + """ + return f"_setattr('{attr_name}', {value_var})" + + +def _setattr_with_converter(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return "_setattr('%s', %s(%s))" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _assign(attr_name, value, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return f"self.{attr_name} = {value}" + + +def _assign_with_converter(attr_name, value_var, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True) + + return "self.%s = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _attrs_to_init_script( + attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, +): + """ + Return a script of an initializer for *attrs* and a dict of globals. + + The globals are expected by the generated script. + + If *frozen* is True, we cannot set the attributes directly so we use + a cached ``object.__setattr__``. + """ + lines = [] + if pre_init: + lines.append("self.__attrs_pre_init__()") + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. + # Note _setattr will be used again below if cache_hash is True + "_setattr = _cached_setattr_get(self)" + ) + + if frozen is True: + if slots is True: + fmt_setter = _setattr + fmt_setter_with_converter = _setattr_with_converter + else: + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. + # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") + + def fmt_setter(attr_name, value_var, has_on_setattr): + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return f"_inst_dict['{attr_name}'] = {value_var}" + + def fmt_setter_with_converter( + attr_name, value_var, has_on_setattr + ): + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr + ) + + return "_inst_dict['%s'] = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + else: + # Not frozen. + fmt_setter = _assign + fmt_setter_with_converter = _assign_with_converter + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. 
+ names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not + # explicitly provided + arg_name = a.alias + + has_factory = isinstance(a.default, Factory) + if has_factory and a.default.takes_self: + maybe_self = "self" + else: + maybe_self = "" + + if a.init is False: + if has_factory: + init_factory_name = _init_factory_pat % (a.name,) + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = f"{arg_name}=attr_dict['{attr_name}'].default" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = f"{arg_name}=NOTHING" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append(f"if {arg_name} is not NOTHING:") + + init_factory_name = _init_factory_pat % (a.name,) + if a.converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and a.converter is None: + annotations[arg_name] = a.type + elif a.converter is not None: + # Try to get the type from the converter. + t = _AnnotationExtractor(a.converter).get_first_param_type() + if t: + annotations[arg_name] = t + + if attrs_to_validate: # we can skip this if there are no validators. 
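+        # The emitted block has the shape (illustrative, for a field ``x``):
+        #     if _config._run_validators is True:
+        #         __attr_validator_x(self, __attr_x, self.x)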
+ names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append(f" {val_name}(self, {attr_name}, self.{a.name})") + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if post_init: + lines.append("self.__attrs_post_init__()") + + # because this is set only after __attrs_post_init__ is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to + # field values during post-init combined with post-init accessing the + # hash code would result in silent bugs. + if cache_hash: + if frozen: + if slots: + # if frozen and slots, then _setattr defined above + init_hash_cache = "_setattr('%s', %s)" + else: + # if frozen and not slots, then _inst_dict defined above + init_hash_cache = "_inst_dict['%s'] = %s" + else: + init_hash_cache = "self.%s = %s" + lines.append(init_hash_cache % (_hash_cache_field, "None")) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join(f"self.{a.name}" for a in attrs if a.init) + + lines.append(f"BaseException.__init__(self, {vals})") + + args = ", ".join(args) + if kw_only_args: + args += "%s*, %s" % ( + ", " if args else "", # leading comma + ", ".join(kw_only_args), # kw_only args + ) + + return ( + "def %s(self, %s):\n %s\n" + % ( + ("__attrs_init__" if attrs_init else "__init__"), + args, + "\n ".join(lines) if lines else "pass", + ), + names_for_globals, + annotations, + ) + + +def _default_init_alias_for(name: str) -> str: + """ + The default __init__ parameter name for a field. + + This performs private-name adjustment via leading-unscore stripping, + and is the default value of Attribute.alias if not provided. + """ + + return name.lstrip("_") + + +class Attribute: + """ + *Read-only* representation of an attribute. + + The class has *all* arguments of `attr.ib` (except for ``factory`` + which is only syntactic sugar for ``default=Factory(...)`` plus the + following: + + - ``name`` (`str`): The name of the attribute. + - ``alias`` (`str`): The __init__ parameter name of the attribute, after + any explicit overrides and default private-attribute-name handling. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables + that are used for comparing and ordering objects by this attribute, + respectively. These are set by passing a callable to `attr.ib`'s ``eq``, + ``order``, or ``cmp`` arguments. See also :ref:`comparison customization + `. + + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. + - The ``alias`` property exposes the __init__ parameter name of the field, + with any overrides and default private-attribute handling applied. + + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + .. versionadded:: 22.2.0 *alias* + + For the full version history of the fields, see `attr.ib`. 
+ """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + "alias", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + alias=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _obj_setattr.__get__(self) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + types.MappingProxyType(dict(metadata)) # Shallow copy + if metadata + else _empty_metadata_singleton + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + bound_setattr("alias", alias) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + raise ValueError( + "Type annotation and type argument cannot both be present" + ) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict, + ) + + # Don't use attr.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attr.evolve` but that function does not work + with ``Attribute``. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. 
+ """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _obj_setattr.__get__(self) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + types.MappingProxyType(dict(value)) + if value + else _empty_metadata_singleton, + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + alias=_default_init_alias_for(name), + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr: + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. + """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + "alias", + ) + __attrs_attrs__ = tuple( + Attribute( + name=name, + alias=_default_init_alias_for(name), + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + "alias", + ) + ) + ( + Attribute( + name="metadata", + alias="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + alias, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + self.alias = alias + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + :raises DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class Factory: + """ + Stores a factory callable. + + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. 
+ + :param callable factory: A callable that takes either none or exactly one + mandatory positional argument depending on *takes_self*. + :param bool takes_self: Pass the partially initialized instance that is + being initialized as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + """ + `Factory` is part of the default machinery so if we want a default + value here, we have to implement it ourselves. + """ + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +def make_class(name, attrs, bases=(object,), **attributes_arguments): + """ + A quick way to create a new class called *name* with *attrs*. + + :param str name: The name for the new class. + + :param attrs: A list of names or a dictionary of mappings of names to + attributes. + + The order is deduced from the order of the names or attributes inside + *attrs*. Otherwise the order of the definition of the attributes is + used. + :type attrs: `list` or `dict` + + :param tuple bases: Classes that the new class will subclass. + + :param attributes_arguments: Passed unmodified to `attr.s`. + + :return: A new class with *attrs*. + :rtype: type + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = {a: attrib() for a in attrs} + else: + raise TypeError("attrs argument must be a dict or a list.") + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + except (AttributeError, ValueError): + pass + + # We do it here for proper warnings with meaningful stacklevel. + cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + return _attrs(these=cls_dict, **attributes_arguments)(type_) + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. 
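+# A sketch of how these compose (illustrative only):
+#
+#     x = attrib(validator=and_(instance_of(int), in_(range(10))))
+#     y = attrib(converter=pipe(str.strip, int))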
+
+
+@attrs(slots=True, hash=True)
+class _AndValidator:
+    """
+    Compose many validators into a single one.
+    """
+
+    _validators = attrib()
+
+    def __call__(self, inst, attr, value):
+        for v in self._validators:
+            v(inst, attr, value)
+
+
+def and_(*validators):
+    """
+    A validator that composes multiple validators into one.
+
+    When called on a value, it runs all wrapped validators.
+
+    :param callables validators: Arbitrary number of validators.
+
+    .. versionadded:: 17.1.0
+    """
+    vals = []
+    for validator in validators:
+        vals.extend(
+            validator._validators
+            if isinstance(validator, _AndValidator)
+            else [validator]
+        )
+
+    return _AndValidator(tuple(vals))
+
+
+def pipe(*converters):
+    """
+    A converter that composes multiple converters into one.
+
+    When called on a value, it runs all wrapped converters, returning the
+    *last* value.
+
+    Type annotations will be inferred from the wrapped converters', if they
+    have any.
+
+    :param callables converters: Arbitrary number of converters.
+
+    .. versionadded:: 20.1.0
+    """
+
+    def pipe_converter(val):
+        for converter in converters:
+            val = converter(val)
+
+        return val
+
+    if not converters:
+        # If the converter list is empty, pipe_converter is the identity.
+        A = typing.TypeVar("A")
+        pipe_converter.__annotations__ = {"val": A, "return": A}
+    else:
+        # Get parameter type from first converter.
+        t = _AnnotationExtractor(converters[0]).get_first_param_type()
+        if t:
+            pipe_converter.__annotations__["val"] = t
+
+        # Get return type from last converter.
+        rt = _AnnotationExtractor(converters[-1]).get_return_type()
+        if rt:
+            pipe_converter.__annotations__["return"] = rt
+
+    return pipe_converter
diff --git a/venv/lib/python3.10/site-packages/attr/_next_gen.py b/venv/lib/python3.10/site-packages/attr/_next_gen.py
new file mode 100644
index 0000000..c59d848
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/attr/_next_gen.py
@@ -0,0 +1,226 @@
+# SPDX-License-Identifier: MIT
+
+"""
+These are keyword-only APIs that call `attr.s` and `attr.ib` with different
+default values.
+"""
+
+
+from functools import partial
+
+from . import setters
+from ._funcs import asdict as _asdict
+from ._funcs import astuple as _astuple
+from ._make import (
+    NOTHING,
+    _frozen_setattrs,
+    _ng_default_on_setattr,
+    attrib,
+    attrs,
+)
+from .exceptions import UnannotatedAttributeError
+
+
+def define(
+    maybe_cls=None,
+    *,
+    these=None,
+    repr=None,
+    unsafe_hash=None,
+    hash=None,
+    init=None,
+    slots=True,
+    frozen=False,
+    weakref_slot=True,
+    str=False,
+    auto_attribs=None,
+    kw_only=False,
+    cache_hash=False,
+    auto_exc=True,
+    eq=None,
+    order=False,
+    auto_detect=True,
+    getstate_setstate=None,
+    on_setattr=None,
+    field_transformer=None,
+    match_args=True,
+):
+    r"""
+    Define an ``attrs`` class.
+
+    Differences to the classic `attr.s` that it uses underneath:
+
+    - Automatically detect whether or not *auto_attribs* should be `True`
+      (c.f. *auto_attribs* parameter).
+    - If *frozen* is `False`, run converters and validators when setting an
+      attribute by default.
+    - *slots=True*
+
+      .. caution::
+
+         Usually this has only upsides and few visible effects in everyday
+         programming.  But it *can* lead to some surprising behaviors, so
+         please make sure to read :term:`slotted classes`.
+    - *auto_exc=True*
+    - *auto_detect=True*
+    - *order=False*
+    - Some options that were only relevant on Python 2 or were kept around
+      for backwards-compatibility have been removed.
+
+    Please note that these are all defaults and you can change them as you
+    wish.
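+
+    A quick sketch of typical use (illustrative)::
+
+        >>> import attrs
+        >>> @attrs.define
+        ... class Point:
+        ...     x: int
+        ...     y: int = 0
+        >>> Point(1)
+        Point(x=1, y=0)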
+ + :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves + exactly like `attr.s`. If left `None`, `attr.s` will try to guess: + + 1. If any attributes are annotated and no unannotated `attrs.fields`\ s + are found, it assumes *auto_attribs=True*. + 2. Otherwise it assumes *auto_attribs=False* and tries to collect + `attrs.fields`\ s. + + For now, please refer to `attr.s` for the rest of the parameters. + + .. versionadded:: 20.1.0 + .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. + .. versionadded:: 22.2.0 + *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + unsafe_hash=unsafe_hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + match_args=match_args, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes convert & validate on setattr. + if frozen is False and on_setattr is None: + on_setattr = _ng_default_on_setattr + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + raise ValueError( + "Frozen classes can't use on_setattr " + "(frozen-ness was inherited)." + ) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Identical to `attr.ib`, except keyword-only and with some arguments + removed. + + .. versionadded:: 20.1.0 + """ + return attrib( + default=default, + validator=validator, + repr=repr, + hash=hash, + init=init, + metadata=metadata, + converter=converter, + factory=factory, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + alias=alias, + ) + + +def asdict(inst, *, recurse=True, filter=None, value_serializer=None): + """ + Same as `attr.asdict`, except that collections types are always retained + and dict is always used as *dict_factory*. + + .. versionadded:: 21.3.0 + """ + return _asdict( + inst=inst, + recurse=recurse, + filter=filter, + value_serializer=value_serializer, + retain_collection_types=True, + ) + + +def astuple(inst, *, recurse=True, filter=None): + """ + Same as `attr.astuple`, except that collections types are always retained + and `tuple` is always used as the *tuple_factory*. + + .. 
versionadded:: 21.3.0 + """ + return _astuple( + inst=inst, recurse=recurse, filter=filter, retain_collection_types=True + ) diff --git a/venv/lib/python3.10/site-packages/attr/_typing_compat.pyi b/venv/lib/python3.10/site-packages/attr/_typing_compat.pyi new file mode 100644 index 0000000..ca7b71e --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/_typing_compat.pyi @@ -0,0 +1,15 @@ +from typing import Any, ClassVar, Protocol + +# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`. +MYPY = False + +if MYPY: + # A protocol to be able to statically accept an attrs class. + class AttrsInstance_(Protocol): + __attrs_attrs__: ClassVar[Any] + +else: + # For type checkers without plug-in support use an empty protocol that + # will (hopefully) be combined into a union. + class AttrsInstance_(Protocol): + pass diff --git a/venv/lib/python3.10/site-packages/attr/_version_info.py b/venv/lib/python3.10/site-packages/attr/_version_info.py new file mode 100644 index 0000000..51a1312 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/_version_info.py @@ -0,0 +1,86 @@ +# SPDX-License-Identifier: MIT + + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo: + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. + + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. + return us < them diff --git a/venv/lib/python3.10/site-packages/attr/_version_info.pyi b/venv/lib/python3.10/site-packages/attr/_version_info.pyi new file mode 100644 index 0000000..45ced08 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... 
diff --git a/venv/lib/python3.10/site-packages/attr/converters.py b/venv/lib/python3.10/site-packages/attr/converters.py
new file mode 100644
index 0000000..4cada10
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/attr/converters.py
@@ -0,0 +1,144 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful converters.
+"""
+
+
+import typing
+
+from ._compat import _AnnotationExtractor
+from ._make import NOTHING, Factory, pipe
+
+
+__all__ = [
+    "default_if_none",
+    "optional",
+    "pipe",
+    "to_bool",
+]
+
+
+def optional(converter):
+    """
+    A converter that allows an attribute to be optional.  An optional
+    attribute is one which can be set to ``None``.
+
+    Type annotations will be inferred from the wrapped converter's, if it
+    has any.
+
+    :param callable converter: the converter that is used for non-``None``
+        values.
+
+    .. versionadded:: 17.1.0
+    """
+
+    def optional_converter(val):
+        if val is None:
+            return None
+        return converter(val)
+
+    xtr = _AnnotationExtractor(converter)
+
+    t = xtr.get_first_param_type()
+    if t:
+        optional_converter.__annotations__["val"] = typing.Optional[t]
+
+    rt = xtr.get_return_type()
+    if rt:
+        optional_converter.__annotations__["return"] = typing.Optional[rt]
+
+    return optional_converter
+
+
+def default_if_none(default=NOTHING, factory=None):
+    """
+    A converter that allows replacing ``None`` values with *default* or the
+    result of *factory*.
+
+    :param default: Value to be used if ``None`` is passed.  Passing an
+        instance of `attrs.Factory` is supported, however the ``takes_self``
+        option is *not*.
+    :param callable factory: A callable that takes no parameters and whose
+        result is used if ``None`` is passed.
+
+    :raises TypeError: If **neither** *default* **nor** *factory* is passed.
+    :raises TypeError: If **both** *default* and *factory* are passed.
+    :raises ValueError: If an instance of `attrs.Factory` is passed with
+        ``takes_self=True``.
+
+    .. versionadded:: 18.2.0
+    """
+    if default is NOTHING and factory is None:
+        raise TypeError("Must pass either `default` or `factory`.")
+
+    if default is not NOTHING and factory is not None:
+        raise TypeError(
+            "Must pass either `default` or `factory` but not both."
+        )
+
+    if factory is not None:
+        default = Factory(factory)
+
+    if isinstance(default, Factory):
+        if default.takes_self:
+            raise ValueError(
+                "`takes_self` is not supported by default_if_none."
+            )
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default.factory()
+
+    else:
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default
+
+    return default_if_none_converter
+
+
+def to_bool(val):
+    """
+    Convert "boolean" strings (e.g., from environment variables) to real
+    booleans.
+
+    Values mapping to :code:`True`:
+
+    - :code:`True`
+    - :code:`"true"` / :code:`"t"`
+    - :code:`"yes"` / :code:`"y"`
+    - :code:`"on"`
+    - :code:`"1"`
+    - :code:`1`
+
+    Values mapping to :code:`False`:
+
+    - :code:`False`
+    - :code:`"false"` / :code:`"f"`
+    - :code:`"no"` / :code:`"n"`
+    - :code:`"off"`
+    - :code:`"0"`
+    - :code:`0`
+
+    :raises ValueError: for any other value.
+
+    .. versionadded:: 21.3.0
+    """
+    if isinstance(val, str):
+        val = val.lower()
+    truthy = {True, "true", "t", "yes", "y", "on", "1", 1}
+    falsy = {False, "false", "f", "no", "n", "off", "0", 0}
+    try:
+        if val in truthy:
+            return True
+        if val in falsy:
+            return False
+    except TypeError:
+        # Raised when "val" is not hashable (e.g., lists)
+        pass
+    raise ValueError(f"Cannot convert value to bool: {val}")
diff --git a/venv/lib/python3.10/site-packages/attr/converters.pyi b/venv/lib/python3.10/site-packages/attr/converters.pyi
new file mode 100644
index 0000000..5abb49f
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/attr/converters.pyi
@@ -0,0 +1,13 @@
+from typing import Callable, TypeVar, overload
+
+from . import _ConverterType
+
+_T = TypeVar("_T")
+
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
+def to_bool(val: str) -> bool: ...
diff --git a/venv/lib/python3.10/site-packages/attr/exceptions.py b/venv/lib/python3.10/site-packages/attr/exceptions.py
new file mode 100644
index 0000000..5dc51e0
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/attr/exceptions.py
@@ -0,0 +1,92 @@
+# SPDX-License-Identifier: MIT
+
+
+class FrozenError(AttributeError):
+    """
+    An attempt has been made to modify a frozen/immutable instance or
+    attribute.
+
+    It mirrors the behavior of ``namedtuples`` by using the same error
+    message and subclassing `AttributeError`.
+
+    .. versionadded:: 20.1.0
+    """
+
+    msg = "can't set attribute"
+    args = [msg]
+
+
+class FrozenInstanceError(FrozenError):
+    """
+    An attempt has been made to modify a frozen instance.
+
+    .. versionadded:: 16.1.0
+    """
+
+
+class FrozenAttributeError(FrozenError):
+    """
+    An attempt has been made to modify a frozen attribute.
+
+    .. versionadded:: 20.1.0
+    """
+
+
+class AttrsAttributeNotFoundError(ValueError):
+    """
+    An ``attrs`` function couldn't find an attribute that the user asked
+    for.
+
+    .. versionadded:: 16.2.0
+    """
+
+
+class NotAnAttrsClassError(ValueError):
+    """
+    A non-``attrs`` class has been passed into an ``attrs`` function.
+
+    .. versionadded:: 16.2.0
+    """
+
+
+class DefaultAlreadySetError(RuntimeError):
+    """
+    A default has been set with ``attr.ib()`` and an attempt has been made
+    to reset it using the decorator.
+
+    .. versionadded:: 17.1.0
+    """
+
+
+class UnannotatedAttributeError(RuntimeError):
+    """
+    A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
+    annotation.
+
+    .. versionadded:: 17.3.0
+    """
+
+
+class PythonTooOldError(RuntimeError):
+    """
+    An ``attrs`` feature requiring a newer Python version has been used.
+
+    .. versionadded:: 18.2.0
+    """
+
+
+class NotCallableError(TypeError):
+    """
+    An ``attr.ib()`` requiring a callable has been set with a value that is
+    not callable.
+
+    .. versionadded:: 19.2.0
+    """
+
+    def __init__(self, msg, value):
+        super(TypeError, self).__init__(msg, value)
+        self.msg = msg
+        self.value = value
+
+    def __str__(self):
+        return str(self.msg)
diff --git a/venv/lib/python3.10/site-packages/attr/exceptions.pyi b/venv/lib/python3.10/site-packages/attr/exceptions.pyi
new file mode 100644
index 0000000..f268011
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/attr/exceptions.pyi
@@ -0,0 +1,17 @@
+from typing import Any
+
+class FrozenError(AttributeError):
+    msg: str = ...
+ +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... diff --git a/venv/lib/python3.10/site-packages/attr/filters.py b/venv/lib/python3.10/site-packages/attr/filters.py new file mode 100644 index 0000000..baa25e9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/filters.py @@ -0,0 +1,51 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful filters for `attr.asdict`. +""" + +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isinstance(cls, type)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Include *what*. + + :param what: What to include. + :type what: `list` of `type` or `attrs.Attribute`\\ s + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def include_(attribute, value): + return value.__class__ in cls or attribute in attrs + + return include_ + + +def exclude(*what): + """ + Exclude *what*. + + :param what: What to exclude. + :type what: `list` of classes or `attrs.Attribute`\\ s. + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def exclude_(attribute, value): + return value.__class__ not in cls and attribute not in attrs + + return exclude_ diff --git a/venv/lib/python3.10/site-packages/attr/filters.pyi b/venv/lib/python3.10/site-packages/attr/filters.pyi new file mode 100644 index 0000000..9938668 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/filters.pyi @@ -0,0 +1,6 @@ +from typing import Any, Union + +from . import Attribute, _FilterType + +def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/venv/lib/python3.10/site-packages/attr/py.typed b/venv/lib/python3.10/site-packages/attr/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/attr/setters.py b/venv/lib/python3.10/site-packages/attr/setters.py new file mode 100644 index 0000000..12ed675 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/setters.py @@ -0,0 +1,73 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly used hooks for on_setattr. +""" + + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. 
versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + return c(new_value) + + return new_value + + +# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. +# autodata stopped working, so the docstring is inlined in the API docs. +NO_OP = object() diff --git a/venv/lib/python3.10/site-packages/attr/setters.pyi b/venv/lib/python3.10/site-packages/attr/setters.pyi new file mode 100644 index 0000000..72f7ce4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/setters.pyi @@ -0,0 +1,19 @@ +from typing import Any, NewType, NoReturn, TypeVar + +from . import Attribute, _OnSetAttrType + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... + +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/venv/lib/python3.10/site-packages/attr/validators.py b/venv/lib/python3.10/site-packages/attr/validators.py new file mode 100644 index 0000000..852ae96 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/validators.py @@ -0,0 +1,714 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful validators. +""" + + +import operator +import re + +from contextlib import contextmanager + +from ._config import get_run_validators, set_run_validators +from ._make import _AndValidator, and_, attrib, attrs +from .converters import default_if_none +from .exceptions import NotCallableError + + +try: + Pattern = re.Pattern +except AttributeError: # Python <3.7 lacks a Pattern type. + Pattern = type(re.compile("")) + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", + "in_", + "instance_of", + "is_callable", + "le", + "lt", + "matches_re", + "max_len", + "min_len", + "not_", + "optional", + "provides", + "set_disabled", +] + + +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + :param disabled: If ``True``, disable running all validators. + :type disabled: bool + + .. warning:: + + This function is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(not disabled) + + +def get_disabled(): + """ + Return a bool indicating whether validators are currently disabled or not. + + :return: ``True`` if validators are currently disabled. + :rtype: bool + + .. versionadded:: 21.3.0 + """ + return not get_run_validators() + + +@contextmanager +def disabled(): + """ + Context manager that disables running validators within its context. + + .. warning:: + + This context manager is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(False) + try: + yield + finally: + set_run_validators(True) + + +@attrs(repr=False, slots=True, hash=True) +class _InstanceOfValidator: + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. 
+ """ + if not isinstance(value, self.type): + raise TypeError( + "'{name}' must be {type!r} (got {value!r} that is a " + "{actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ), + attr, + self.type, + value, + ) + + def __repr__(self): + return "".format( + type=self.type + ) + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of type + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected type, and the value it + got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True, slots=True) +class _MatchesReValidator: + pattern = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.match_func(value): + raise ValueError( + "'{name}' must match regex {pattern!r}" + " ({value!r} doesn't)".format( + name=attr.name, pattern=self.pattern.pattern, value=value + ), + attr, + self.pattern, + value, + ) + + def __repr__(self): + return "".format( + pattern=self.pattern + ) + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called + with a string that doesn't match *regex*. + + :param regex: a regex string or precompiled pattern to match against + :param int flags: flags that will be passed to the underlying re function + (default 0) + :param callable func: which underlying `re` function to call. Valid options + are `re.fullmatch`, `re.search`, and `re.match`; the default ``None`` + means `re.fullmatch`. For performance reasons, the pattern is always + precompiled using `re.compile`. + + .. versionadded:: 19.2.0 + .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. + """ + valid_funcs = (re.fullmatch, None, re.search, re.match) + if func not in valid_funcs: + raise ValueError( + "'func' must be one of {}.".format( + ", ".join( + sorted( + e and e.__name__ or "None" for e in set(valid_funcs) + ) + ) + ) + ) + + if isinstance(regex, Pattern): + if flags: + raise TypeError( + "'flags' can only be used with a string pattern; " + "pass flags to re.compile() instead" + ) + pattern = regex + else: + pattern = re.compile(regex, flags) + + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + else: + match_func = pattern.fullmatch + + return _MatchesReValidator(pattern, match_func) + + +@attrs(repr=False, slots=True, hash=True) +class _ProvidesValidator: + interface = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.interface.providedBy(value): + raise TypeError( + "'{name}' must provide {interface!r} which {value!r} " + "doesn't.".format( + name=attr.name, interface=self.interface, value=value + ), + attr, + self.interface, + value, + ) + + def __repr__(self): + return "".format( + interface=self.interface + ) + + +def provides(interface): + """ + A validator that raises a `TypeError` if the initializer is called + with an object that does not provide the requested *interface* (checks are + performed using ``interface.providedBy(value)`` (see `zope.interface + `_). 
+
+    :param interface: The interface to check for.
+    :type interface: ``zope.interface.Interface``
+
+    :raises TypeError: With a human readable error message, the attribute
+        (of type `attrs.Attribute`), the expected interface, and the value
+        it got.
+    """
+    return _ProvidesValidator(interface)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _OptionalValidator:
+    validator = attrib()
+
+    def __call__(self, inst, attr, value):
+        if value is None:
+            return
+
+        self.validator(inst, attr, value)
+
+    def __repr__(self):
+        return "<optional validator for {what} or None>".format(
+            what=repr(self.validator)
+        )
+
+
+def optional(validator):
+    """
+    A validator that makes an attribute optional.  An optional attribute is
+    one which can be set to ``None`` in addition to satisfying the
+    requirements of the sub-validator.
+
+    :param validator: A validator (or a list of validators) that is used for
+        non-``None`` values.
+    :type validator: callable or `list` of callables.
+
+    .. versionadded:: 15.1.0
+    .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+    """
+    if isinstance(validator, list):
+        return _OptionalValidator(_AndValidator(validator))
+    return _OptionalValidator(validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InValidator:
+    options = attrib()
+
+    def __call__(self, inst, attr, value):
+        try:
+            in_options = value in self.options
+        except TypeError:  # e.g. `1 in "abc"`
+            in_options = False
+
+        if not in_options:
+            raise ValueError(
+                "'{name}' must be in {options!r} (got {value!r})".format(
+                    name=attr.name, options=self.options, value=value
+                ),
+                attr,
+                self.options,
+                value,
+            )
+
+    def __repr__(self):
+        return "<in_ validator with options {options!r}>".format(
+            options=self.options
+        )
+
+
+def in_(options):
+    """
+    A validator that raises a `ValueError` if the initializer is called with
+    a value that does not belong in the options provided.  The check is
+    performed using ``value in options``.
+
+    :param options: Allowed options.
+    :type options: list, tuple, `enum.Enum`, ...
+
+    :raises ValueError: With a human readable error message, the attribute
+        (of type `attrs.Attribute`), the expected options, and the value it
+        got.
+
+    .. versionadded:: 17.1.0
+    .. versionchanged:: 22.1.0
+       The ValueError was incomplete until now and only contained the human
+       readable error message.  Now it contains all the information that has
+       been promised since 17.1.0.
+    """
+    return _InValidator(options)
+
+
+@attrs(repr=False, slots=False, hash=True)
+class _IsCallableValidator:
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not callable(value):
+            message = (
+                "'{name}' must be callable "
+                "(got {value!r} that is a {actual!r})."
+            )
+            raise NotCallableError(
+                msg=message.format(
+                    name=attr.name, value=value, actual=value.__class__
+                ),
+                value=value,
+            )
+
+    def __repr__(self):
+        return "<is_callable validator>"
+
+
+def is_callable():
+    """
+    A validator that raises an `attr.exceptions.NotCallableError` if the
+    initializer is called with a value for this particular attribute that is
+    not callable.
+
+    .. versionadded:: 19.1.0
+
+    :raises `attr.exceptions.NotCallableError`: With a human readable error
+        message containing the attribute (`attrs.Attribute`) name, and the
+        value it got.
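+
+    For example (illustrative)::
+
+        @attr.s
+        class C:
+            hook = attr.ib(validator=attr.validators.is_callable())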
+ """ + return _IsCallableValidator() + + +@attrs(repr=False, slots=True, hash=True) +class _DeepIterable: + member_validator = attrib(validator=is_callable()) + iterable_validator = attrib( + default=None, validator=optional(is_callable()) + ) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.iterable_validator is not None: + self.iterable_validator(inst, attr, value) + + for member in value: + self.member_validator(inst, attr, member) + + def __repr__(self): + iterable_identifier = ( + "" + if self.iterable_validator is None + else f" {self.iterable_validator!r}" + ) + return ( + "" + ).format( + iterable_identifier=iterable_identifier, + member=self.member_validator, + ) + + +def deep_iterable(member_validator, iterable_validator=None): + """ + A validator that performs deep validation of an iterable. + + :param member_validator: Validator(s) to apply to iterable members + :param iterable_validator: Validator to apply to iterable itself + (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + if isinstance(member_validator, (list, tuple)): + member_validator = and_(*member_validator) + return _DeepIterable(member_validator, iterable_validator) + + +@attrs(repr=False, slots=True, hash=True) +class _DeepMapping: + key_validator = attrib(validator=is_callable()) + value_validator = attrib(validator=is_callable()) + mapping_validator = attrib(default=None, validator=optional(is_callable())) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.mapping_validator is not None: + self.mapping_validator(inst, attr, value) + + for key in value: + self.key_validator(inst, attr, key) + self.value_validator(inst, attr, value[key]) + + def __repr__(self): + return ( + "" + ).format(key=self.key_validator, value=self.value_validator) + + +def deep_mapping(key_validator, value_validator, mapping_validator=None): + """ + A validator that performs deep validation of a dictionary. + + :param key_validator: Validator to apply to dictionary keys + :param value_validator: Validator to apply to dictionary values + :param mapping_validator: Validator to apply to top-level mapping + attribute (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepMapping(key_validator, value_validator, mapping_validator) + + +@attrs(repr=False, frozen=True, slots=True) +class _NumberValidator: + bound = attrib() + compare_op = attrib() + compare_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.compare_func(value, self.bound): + raise ValueError( + "'{name}' must be {op} {bound}: {value}".format( + name=attr.name, + op=self.compare_op, + bound=self.bound, + value=value, + ) + ) + + def __repr__(self): + return "".format( + op=self.compare_op, bound=self.bound + ) + + +def lt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number larger or equal to *val*. + + :param val: Exclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<", operator.lt) + + +def le(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number greater than *val*. + + :param val: Inclusive upper bound for values + + .. 
+    """
+    return _NumberValidator(val, "<=", operator.le)
+
+
+def ge(val):
+    """
+    A validator that raises `ValueError` if the initializer is called with a
+    number smaller than *val*.
+
+    :param val: Inclusive lower bound for values
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, ">=", operator.ge)
+
+
+def gt(val):
+    """
+    A validator that raises `ValueError` if the initializer is called with a
+    number smaller than or equal to *val*.
+
+    :param val: Exclusive lower bound for values
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, ">", operator.gt)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MaxLengthValidator:
+    max_length = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if len(value) > self.max_length:
+            raise ValueError(
+                "Length of '{name}' must be <= {max}: {len}".format(
+                    name=attr.name, max=self.max_length, len=len(value)
+                )
+            )
+
+    def __repr__(self):
+        return f"<max_len validator for {self.max_length}>"
+
+
+def max_len(length):
+    """
+    A validator that raises `ValueError` if the initializer is called with a
+    string or iterable that is longer than *length*.
+
+    :param int length: Maximum length of the string or iterable
+
+    .. versionadded:: 21.3.0
+    """
+    return _MaxLengthValidator(length)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MinLengthValidator:
+    min_length = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if len(value) < self.min_length:
+            raise ValueError(
+                "Length of '{name}' must be >= {min}: {len}".format(
+                    name=attr.name, min=self.min_length, len=len(value)
+                )
+            )
+
+    def __repr__(self):
+        return f"<min_len validator for {self.min_length}>"
+
+
+def min_len(length):
+    """
+    A validator that raises `ValueError` if the initializer is called with a
+    string or iterable that is shorter than *length*.
+
+    :param int length: Minimum length of the string or iterable
+
+    .. versionadded:: 22.1.0
+    """
+    return _MinLengthValidator(length)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _SubclassOfValidator:
+    type = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not issubclass(value, self.type):
+            raise TypeError(
+                "'{name}' must be a subclass of {type!r} "
+                "(got {value!r}).".format(
+                    name=attr.name,
+                    type=self.type,
+                    value=value,
+                ),
+                attr,
+                self.type,
+                value,
+            )
+
+    def __repr__(self):
+        return "<subclass_of validator for type {type!r}>".format(
+            type=self.type
+        )
+
+
+def _subclass_of(type):
+    """
+    A validator that raises a `TypeError` if the initializer is called with
+    a wrong type for this particular attribute (checks are performed using
+    `issubclass`, therefore it's also valid to pass a tuple of types).
+
+    :param type: The type to check for.
+    :type type: type or tuple of types
+
+    :raises TypeError: With a human readable error message, the attribute
+        (of type `attrs.Attribute`), the expected type, and the value it
+        got.
+ """ + return _SubclassOfValidator(type) + + +@attrs(repr=False, slots=True, hash=True) +class _NotValidator: + validator = attrib() + msg = attrib( + converter=default_if_none( + "not_ validator child '{validator!r}' " + "did not raise a captured error" + ) + ) + exc_types = attrib( + validator=deep_iterable( + member_validator=_subclass_of(Exception), + iterable_validator=instance_of(tuple), + ), + ) + + def __call__(self, inst, attr, value): + try: + self.validator(inst, attr, value) + except self.exc_types: + pass # suppress error to invert validity + else: + raise ValueError( + self.msg.format( + validator=self.validator, + exc_types=self.exc_types, + ), + attr, + self.validator, + value, + self.exc_types, + ) + + def __repr__(self): + return ( + "" + ).format( + what=self.validator, + exc_types=self.exc_types, + ) + + +def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)): + """ + A validator that wraps and logically 'inverts' the validator passed to it. + It will raise a `ValueError` if the provided validator *doesn't* raise a + `ValueError` or `TypeError` (by default), and will suppress the exception + if the provided validator *does*. + + Intended to be used with existing validators to compose logic without + needing to create inverted variants, for example, ``not_(in_(...))``. + + :param validator: A validator to be logically inverted. + :param msg: Message to raise if validator fails. + Formatted with keys ``exc_types`` and ``validator``. + :type msg: str + :param exc_types: Exception type(s) to capture. + Other types raised by child validators will not be intercepted and + pass through. + + :raises ValueError: With a human readable error message, + the attribute (of type `attrs.Attribute`), + the validator that failed to raise an exception, + the value it got, + and the expected exception types. + + .. versionadded:: 22.2.0 + """ + try: + exc_types = tuple(exc_types) + except TypeError: + exc_types = (exc_types,) + return _NotValidator(validator, msg, exc_types) diff --git a/venv/lib/python3.10/site-packages/attr/validators.pyi b/venv/lib/python3.10/site-packages/attr/validators.pyi new file mode 100644 index 0000000..fd9206d --- /dev/null +++ b/venv/lib/python3.10/site-packages/attr/validators.pyi @@ -0,0 +1,86 @@ +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + List, + Mapping, + Match, + Optional, + Pattern, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +from . import _ValidatorType +from . import _ValidatorArgType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... +@overload +def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2]] +) -> _ValidatorType[Union[_T1, _T2]]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2], Type[_T3]] +) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... +@overload +def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... 
+def optional( + validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: Union[Pattern[AnyStr], AnyStr], + flags: int = ..., + func: Optional[ + Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] + ] = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorArgType[_T], + iterable_validator: Optional[_ValidatorType[_I]] = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: Optional[_ValidatorType[_M]] = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... +def max_len(length: int) -> _ValidatorType[_T]: ... +def min_len(length: int) -> _ValidatorType[_T]: ... +def not_( + validator: _ValidatorType[_T], + *, + msg: Optional[str] = None, + exc_types: Union[Type[Exception], Iterable[Type[Exception]]] = ... +) -> _ValidatorType[_T]: ... diff --git a/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/LICENSE b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/LICENSE new file mode 100644 index 0000000..2bd6453 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack and the attrs contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
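The validators vendored above (`lt`/`le`/`ge`/`gt`, `max_len`/`min_len`, and the inverting `not_`) compose through plain `attrib(validator=...)` lists. A minimal sketch of how they combine, assuming only the vendored `attr` package; the `Reading` class and its field names are illustrative, not from this repository:

```python
from attr import attrs, attrib
from attr import validators as v


@attrs
class Reading:
    # ge/le give inclusive bounds; each raises ValueError on violation.
    percent = attrib(validator=[v.ge(0), v.le(100)])
    # max_len checks len(value), so any sized value (str, list, ...) works.
    label = attrib(validator=v.max_len(10))
    # not_(in_(...)) inverts membership: values *in* the set are rejected.
    channel = attrib(validator=v.not_(v.in_({"reserved", "internal"})))


Reading(percent=42, label="ok", channel="public")  # validates fine
# Reading(percent=101, label="ok", channel="public")  # ValueError from le(100)
```

Note that `not_` only swallows the exception types it is told to capture (`ValueError`/`TypeError` by default); anything else raised by the wrapped validator still propagates.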
diff --git a/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/METADATA new file mode 100644 index 0000000..0f71b57 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/METADATA @@ -0,0 +1,278 @@ +Metadata-Version: 2.1 +Name: attrs +Version: 22.2.0 +Summary: Classes Without Boilerplate +Home-page: https://www.attrs.org/ +Author: Hynek Schlawack +Author-email: hs@ox.cx +Maintainer: Hynek Schlawack +Maintainer-email: hs@ox.cx +License: MIT +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html +Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues +Project-URL: Source Code, https://github.com/python-attrs/attrs +Project-URL: Funding, https://github.com/sponsors/hynek +Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi +Project-URL: Ko-fi, https://ko-fi.com/the_hynek +Keywords: class,attribute,boilerplate,dataclass +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: cov +Requires-Dist: attrs[tests] ; extra == 'cov' +Requires-Dist: coverage-enable-subprocess ; extra == 'cov' +Requires-Dist: coverage[toml] (>=5.3) ; extra == 'cov' +Provides-Extra: dev +Requires-Dist: attrs[docs,tests] ; extra == 'dev' +Provides-Extra: docs +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: myst-parser ; extra == 'docs' +Requires-Dist: zope.interface ; extra == 'docs' +Requires-Dist: sphinx-notfound-page ; extra == 'docs' +Requires-Dist: sphinxcontrib-towncrier ; extra == 'docs' +Requires-Dist: towncrier ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: attrs[tests-no-zope] ; extra == 'tests' +Requires-Dist: zope.interface ; extra == 'tests' +Provides-Extra: tests-no-zope +Requires-Dist: hypothesis ; extra == 'tests-no-zope' +Requires-Dist: pympler ; extra == 'tests-no-zope' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests-no-zope' +Requires-Dist: pytest-xdist[psutil] ; extra == 'tests-no-zope' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests-no-zope' +Requires-Dist: mypy (<0.990,>=0.971) ; (platform_python_implementation == "CPython") and extra == 'tests-no-zope' +Requires-Dist: pytest-mypy-plugins ; (platform_python_implementation == "CPython" and python_version < "3.11") and extra == 'tests-no-zope' +Provides-Extra: tests_no_zope +Requires-Dist: hypothesis ; extra == 'tests_no_zope' +Requires-Dist: pympler ; extra == 'tests_no_zope' +Requires-Dist: pytest (>=4.3.0) ; extra 
== 'tests_no_zope' +Requires-Dist: pytest-xdist[psutil] ; extra == 'tests_no_zope' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests_no_zope' +Requires-Dist: mypy (<0.990,>=0.971) ; (platform_python_implementation == "CPython") and extra == 'tests_no_zope' +Requires-Dist: pytest-mypy-plugins ; (platform_python_implementation == "CPython" and python_version < "3.11") and extra == 'tests_no_zope' + +

    + + + +*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)). +[Trusted by NASA](https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-achievement) for Mars missions since 2020! + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + + +## Sponsors + +*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek). +Especially those generously supporting us at the *The Organization* tier and higher: + +


    + Please consider joining them to help make attrs’s maintenance more sustainable! +

    + + + +## Example + +*attrs* gives you a class decorator and a way to declaratively define the attributes on that class: + + + +```pycon +>>> from attrs import asdict, define, make_class, Factory + +>>> @define +... class SomeClass: +... a_number: int = 42 +... list_of_numbers: list[int] = Factory(list) +... +... def hard_math(self, another_number): +... return self.a_number + sum(self.list_of_numbers) * another_number + + +>>> sc = SomeClass(1, [1, 2, 3]) +>>> sc +SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + +>>> sc.hard_math(3) +19 +>>> sc == SomeClass(1, [1, 2, 3]) +True +>>> sc != SomeClass(2, [3, 2, 1]) +True + +>>> asdict(sc) +{'a_number': 1, 'list_of_numbers': [1, 2, 3]} + +>>> SomeClass() +SomeClass(a_number=42, list_of_numbers=[]) + +>>> C = make_class("C", ["a", "b"]) +>>> C("foo", "bar") +C(a='foo', b='bar') +``` + +After *declaring* your attributes, *attrs* gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable `__repr__`, +- equality-checking methods, +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +**Hate type annotations**!? +No problem! +Types are entirely **optional** with *attrs*. +Simply assign `attrs.field()` to the attributes instead of annotating them with types. + +--- + +This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0. +The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**. + +Please check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for a more in-depth explanation. + + +## Data Classes + +On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*). +In practice it does a lot more and is more flexible. +For instance it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), or allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization). + +For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes). + + +## Project Information + +- [**Changelog**](https://www.attrs.org/en/stable/changelog.html) +- [**Documentation**](https://www.attrs.org/) +- [**PyPI**](https://pypi.org/project/attrs/) +- [**Source Code**](https://github.com/python-attrs/attrs) +- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md) +- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs) +- **License**: [MIT](https://www.attrs.org/en/latest/license.html) +- **Get Help**: please use the `python-attrs` tag on [StackOverflow](https://stackoverflow.com/questions/tagged/python-attrs) +- **Supported Python Versions**: 3.6 and later + + +### *attrs* for Enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. 
+[Learn more.](https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) + + +## Changes in This Release + +### Backwards-incompatible Changes + +- Python 3.5 is not supported anymore. + [#988](https://github.com/python-attrs/attrs/issues/988) + + +### Deprecations + +- Python 3.6 is now deprecated and support will be removed in the next release. + [#1017](https://github.com/python-attrs/attrs/issues/1017) + + +### Changes + +- `attrs.field()` now supports an *alias* option for explicit `__init__` argument names. + + Get `__init__` signatures matching any taste, peculiar or plain! + The [PEP 681 compatible](https://peps.python.org/pep-0681/#field-specifier-parameters) *alias* option can be used to override private attribute name mangling, or add other arbitrary field argument name overrides. + [#950](https://github.com/python-attrs/attrs/issues/950) +- `attrs.NOTHING` is now an enum value, making it possible to use with e.g. [`typing.Literal`](https://docs.python.org/3/library/typing.html#typing.Literal). + [#983](https://github.com/python-attrs/attrs/issues/983) +- Added missing re-import of `attr.AttrsInstance` to the `attrs` namespace. + [#987](https://github.com/python-attrs/attrs/issues/987) +- Fixed a slight performance regression in classes with custom `__setattr__` and sped them up even more. + [#991](https://github.com/python-attrs/attrs/issues/991) +- Class-creation performance improvements by switching performance-sensitive templating operations to f-strings. + + You can expect an improvement of about 5% -- even for very simple classes. + [#995](https://github.com/python-attrs/attrs/issues/995) +- `attrs.has()` is now a [`TypeGuard`](https://docs.python.org/3/library/typing.html#typing.TypeGuard) for `AttrsInstance`. + That means that type checkers know a class is an instance of an `attrs` class if you check it using `attrs.has()` (or `attr.has()`) first. + [#997](https://github.com/python-attrs/attrs/issues/997) +- Made `attrs.AttrsInstance` stub available at runtime and fixed type errors related to the usage of `attrs.AttrsInstance` in *Pyright*. + [#999](https://github.com/python-attrs/attrs/issues/999) +- On Python 3.10 and later, call [`abc.update_abstractmethods()`](https://docs.python.org/3/library/abc.html#abc.update_abstractmethods) on dict classes after creation. + This improves the detection of abstractness. + [#1001](https://github.com/python-attrs/attrs/issues/1001) +- *attrs*'s pickling methods now use dicts instead of tuples. + That is safer and more robust across different versions of a class. + [#1009](https://github.com/python-attrs/attrs/issues/1009) +- Added `attrs.validators.not_(wrapped_validator)` to logically invert *wrapped_validator* by accepting only values where *wrapped_validator* rejects the value with a `ValueError` or `TypeError` (by default, exception types configurable). + [#1010](https://github.com/python-attrs/attrs/issues/1010) +- The type stubs for `attrs.cmp_using()` now have default values. + [#1027](https://github.com/python-attrs/attrs/issues/1027) +- To conform with [PEP 681](https://peps.python.org/pep-0681/), `attr.s()` and `attrs.define()` now accept *unsafe_hash* in addition to *hash*. 
+ [#1065](https://github.com/python-attrs/attrs/issues/1065) + +--- + +[Full changelog](https://www.attrs.org/en/stable/changelog.html) diff --git a/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/RECORD new file mode 100644 index 0000000..181a122 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/RECORD @@ -0,0 +1,56 @@ +attr/__init__.py,sha256=-lJ5CXKE5yKk97Z2HSMRJFiGz1TdXLU9q4Ysb2Id4IQ,1947 +attr/__init__.pyi,sha256=qOjUNync7Lq8NLk30l_DRTh1h62mMl1e4VnqBgY2x24,15831 +attr/__pycache__/__init__.cpython-310.pyc,, +attr/__pycache__/_cmp.cpython-310.pyc,, +attr/__pycache__/_compat.cpython-310.pyc,, +attr/__pycache__/_config.cpython-310.pyc,, +attr/__pycache__/_funcs.cpython-310.pyc,, +attr/__pycache__/_make.cpython-310.pyc,, +attr/__pycache__/_next_gen.cpython-310.pyc,, +attr/__pycache__/_version_info.cpython-310.pyc,, +attr/__pycache__/converters.cpython-310.pyc,, +attr/__pycache__/exceptions.cpython-310.pyc,, +attr/__pycache__/filters.cpython-310.pyc,, +attr/__pycache__/setters.cpython-310.pyc,, +attr/__pycache__/validators.cpython-310.pyc,, +attr/_cmp.py,sha256=mwr1ImJlkFL9Zi0E55-90IfchMKr94ko6e-p4y__M_4,4094 +attr/_cmp.pyi,sha256=sGQmOM0w3_K4-X8cTXR7g0Hqr290E8PTObA9JQxWQqc,399 +attr/_compat.py,sha256=Da-SeMicy7SkTKCCwKtfX41sUMf0o54tK96zsu1qE60,5435 +attr/_config.py,sha256=5W8lgRePuIOWu1ZuqF1899e2CmXGc95-ipwTpF1cEU4,826 +attr/_funcs.py,sha256=0EqqZgKNZBk4PXQvCF_fuWWAz14mSdZpk4UBZpX_fDQ,14545 +attr/_make.py,sha256=MdYHoWXJ2WlQNZPMTX4gkBO06QgPyb3qwSWSxaJ6QVg,96087 +attr/_next_gen.py,sha256=95DRKAfIuHbcwO9W_yWtRsHt3IbfxbAgpyB6agxbghw,6059 +attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469 +attr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=xfGVSPRgWGcym6N5FZM9fyfvCQePqFyApWeC5BXKvoM,3602 +attr/converters.pyi,sha256=jKlpHBEt6HVKJvgrMFJRrHq8p61GXg4-Nd5RZWKJX7M,406 +attr/exceptions.py,sha256=ZGEMLv0CDY1TOtj49OF84myejOn-LCCXAKGIKalKkVU,1915 +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 +attr/filters.py,sha256=aZep54h8-4ZYV5lmZ3Dx2mqeQH4cMx6tuCmCylLNbEU,1038 +attr/filters.pyi,sha256=_Sm80jGySETX_Clzdkon5NHVjQWRl3Y3liQKZX1czXc,215 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=pbCZQ-pE6ZxjDqZfWWUhUFefXtpekIU4qS_YDMLPQ50,1400 +attr/setters.pyi,sha256=pyY8TVNBu8TWhOldv_RxHzmGvdgFQH981db70r0fn5I,567 +attr/validators.py,sha256=gBJAzoo1UNDRTG9-kE0LUoUTgDr2slJymPxb6-UPt7c,20501 +attr/validators.pyi,sha256=ZbJDuF6Kom-L6ym9Cc6eT370S_a7z8YhWmP7z35ayXc,2538 +attrs-22.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrs-22.2.0.dist-info/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109 +attrs-22.2.0.dist-info/METADATA,sha256=jgQypZGK_yplaxCh1S1gnQ_NZYKk-EwtfWygdZ_NgIc,13531 +attrs-22.2.0.dist-info/RECORD,, +attrs-22.2.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +attrs-22.2.0.dist-info/top_level.txt,sha256=AGbmKnOtYpdkLRsDRQVSBIwfL32pAQ6BSo1mt-BxI7M,11 +attrs/__init__.py,sha256=90bKLoqyIHpMjnzJuXSar1dH5anUQXHqT7-yI1Qzg00,1149 +attrs/__init__.pyi,sha256=KMHncABV_sq4pubLAli-iOQjc9EM3g9y2r6M9V71_vY,2148 +attrs/__pycache__/__init__.cpython-310.pyc,, +attrs/__pycache__/converters.cpython-310.pyc,, +attrs/__pycache__/exceptions.cpython-310.pyc,, +attrs/__pycache__/filters.cpython-310.pyc,, 
+attrs/__pycache__/setters.cpython-310.pyc,, +attrs/__pycache__/validators.cpython-310.pyc,, +attrs/converters.py,sha256=fCBEdlYWcmI3sCnpUk2pz22GYtXzqTkp6NeOpdI64PY,70 +attrs/exceptions.py,sha256=SlDli6AY77f6ny-H7oy98OkQjsrw-D_supEuErIVYkE,70 +attrs/filters.py,sha256=dc_dNey29kH6KLU1mT2Dakq7tZ3kBfzEGwzOmDzw1F8,67 +attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs/setters.py,sha256=oKw51C72Hh45wTwYvDHJP9kbicxiMhMR4Y5GvdpKdHQ,67 +attrs/validators.py,sha256=4ag1SyVD2Hm3PYKiNG_NOtR_e7f81Hr6GiNl4YvXo4Q,70 diff --git a/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/WHEEL new file mode 100644 index 0000000..57e3d84 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/top_level.txt new file mode 100644 index 0000000..eca8ba9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs-22.2.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +attr +attrs diff --git a/venv/lib/python3.10/site-packages/attrs/__init__.py b/venv/lib/python3.10/site-packages/attrs/__init__.py new file mode 100644 index 0000000..81dd6b2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs/__init__.py @@ -0,0 +1,72 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + AttrsInstance, + Factory, + __author__, + __copyright__, + __description__, + __doc__, + __email__, + __license__, + __title__, + __url__, + __version__, + __version_info__, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._next_gen import asdict, astuple + +from . import converters, exceptions, filters, setters, validators + + +__all__ = [ + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "Attribute", + "AttrsInstance", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "Factory", + "field", + "fields_dict", + "fields", + "filters", + "frozen", + "has", + "make_class", + "mutable", + "NOTHING", + "resolve_types", + "setters", + "validate", + "validators", +] diff --git a/venv/lib/python3.10/site-packages/attrs/__init__.pyi b/venv/lib/python3.10/site-packages/attrs/__init__.pyi new file mode 100644 index 0000000..4ea64d8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs/__init__.pyi @@ -0,0 +1,67 @@ +from typing import ( + Any, + Callable, + Dict, + Mapping, + Optional, + Sequence, + Tuple, + Type, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. 
+from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import _FilterType +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import AttrsInstance as AttrsInstance +from attr import cmp_using as cmp_using +from attr import converters as converters +from attr import define as define +from attr import evolve as evolve +from attr import exceptions as exceptions +from attr import Factory as Factory +from attr import field as field +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import filters as filters +from attr import frozen as frozen +from attr import has as has +from attr import make_class as make_class +from attr import mutable as mutable +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators + +# TODO: see definition of attr.asdict/astuple +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... 
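The `asdict`/`astuple` stubs above expose the serialization hooks; in particular, `value_serializer` receives `(instance, attribute, value)` and can rewrite each value on its way into the dict. A hedged sketch under that signature; the `Event` class and the ISO-format policy are illustrative only, not part of the vendored sources:

```python
from datetime import datetime

from attrs import asdict, astuple, define


@define
class Event:
    name: str
    at: datetime


def iso_dates(inst, field, value):
    # value_serializer hook: turn datetimes into ISO strings, pass the rest through.
    return value.isoformat() if isinstance(value, datetime) else value


e = Event("deploy", datetime(2023, 3, 25, 10, 45))

print(asdict(e, value_serializer=iso_dates))
# {'name': 'deploy', 'at': '2023-03-25T10:45:00'}

print(astuple(e))
# ('deploy', datetime.datetime(2023, 3, 25, 10, 45))
```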
diff --git a/venv/lib/python3.10/site-packages/attrs/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/attrs/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..a85060b Binary files /dev/null and b/venv/lib/python3.10/site-packages/attrs/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attrs/__pycache__/converters.cpython-310.pyc b/venv/lib/python3.10/site-packages/attrs/__pycache__/converters.cpython-310.pyc new file mode 100644 index 0000000..a0a7c4a Binary files /dev/null and b/venv/lib/python3.10/site-packages/attrs/__pycache__/converters.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attrs/__pycache__/exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/attrs/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000..b5b7916 Binary files /dev/null and b/venv/lib/python3.10/site-packages/attrs/__pycache__/exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attrs/__pycache__/filters.cpython-310.pyc b/venv/lib/python3.10/site-packages/attrs/__pycache__/filters.cpython-310.pyc new file mode 100644 index 0000000..85e6d85 Binary files /dev/null and b/venv/lib/python3.10/site-packages/attrs/__pycache__/filters.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attrs/__pycache__/setters.cpython-310.pyc b/venv/lib/python3.10/site-packages/attrs/__pycache__/setters.cpython-310.pyc new file mode 100644 index 0000000..bd4e3dd Binary files /dev/null and b/venv/lib/python3.10/site-packages/attrs/__pycache__/setters.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attrs/__pycache__/validators.cpython-310.pyc b/venv/lib/python3.10/site-packages/attrs/__pycache__/validators.cpython-310.pyc new file mode 100644 index 0000000..2799d1d Binary files /dev/null and b/venv/lib/python3.10/site-packages/attrs/__pycache__/validators.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/attrs/converters.py b/venv/lib/python3.10/site-packages/attrs/converters.py new file mode 100644 index 0000000..edfa8d3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa diff --git a/venv/lib/python3.10/site-packages/attrs/exceptions.py b/venv/lib/python3.10/site-packages/attrs/exceptions.py new file mode 100644 index 0000000..bd9efed --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa diff --git a/venv/lib/python3.10/site-packages/attrs/filters.py b/venv/lib/python3.10/site-packages/attrs/filters.py new file mode 100644 index 0000000..5295900 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa diff --git a/venv/lib/python3.10/site-packages/attrs/py.typed b/venv/lib/python3.10/site-packages/attrs/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/attrs/setters.py b/venv/lib/python3.10/site-packages/attrs/setters.py new file mode 100644 index 0000000..9b50770 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa diff --git a/venv/lib/python3.10/site-packages/attrs/validators.py b/venv/lib/python3.10/site-packages/attrs/validators.py new file mode 
100644 index 0000000..ab2c9b3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa diff --git a/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/LICENSE b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/LICENSE new file mode 100644 index 0000000..0a64774 --- /dev/null +++ b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/LICENSE @@ -0,0 +1,21 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/METADATA b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/METADATA new file mode 100644 index 0000000..aeb1991 --- /dev/null +++ b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/METADATA @@ -0,0 +1,83 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2022.12.7 +Summary: Python package for providing Mozilla's CA Bundle. +Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.6 +License-File: LICENSE + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. 
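Before the installation notes below, a minimal sketch of consuming the bundle from the standard library, assuming only ``ssl``, ``socket``, and ``certifi`` (the host name is illustrative)::

    import socket
    import ssl

    import certifi

    # Point the default TLS context at Mozilla's curated root store.
    ctx = ssl.create_default_context(cafile=certifi.where())

    with socket.create_connection(("example.org", 443)) as sock:
        with ctx.wrap_socket(sock, server_hostname="example.org") as tls:
            # The handshake only succeeds if the peer chains to a bundled root.
            print(tls.version())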
+ +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed at the end of 2018. + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. + + diff --git a/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/RECORD b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/RECORD new file mode 100644 index 0000000..588b566 --- /dev/null +++ b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/RECORD @@ -0,0 +1,14 @@ +certifi-2022.12.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2022.12.7.dist-info/LICENSE,sha256=oC9sY4-fuE0G93ZMOrCF2K9-2luTwWbaVDEkeQd8b7A,1052 +certifi-2022.12.7.dist-info/METADATA,sha256=chFpcxKhCPEQ3d8-Vz36zr2Micf1eQhKkFFk7_JvJNo,2911 +certifi-2022.12.7.dist-info/RECORD,, +certifi-2022.12.7.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +certifi-2022.12.7.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=bK_nm9bLJzNvWZc2oZdiTwg2KWD4HSPBWGaM0zUDvMw,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-310.pyc,, +certifi/__pycache__/__main__.cpython-310.pyc,, +certifi/__pycache__/core.cpython-310.pyc,, +certifi/cacert.pem,sha256=LBHDzgj_xA05AxnHK8ENT5COnGNElNZe0svFUHMf1SQ,275233 +certifi/core.py,sha256=lhewz0zFb2b4ULsQurElmloYwQoecjWzPqY67P8T7iM,4219 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/WHEEL b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- 
/dev/null +++ b/venv/lib/python3.10/site-packages/certifi-2022.12.7.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/venv/lib/python3.10/site-packages/certifi/__init__.py b/venv/lib/python3.10/site-packages/certifi/__init__.py new file mode 100644 index 0000000..a3546f1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2022.12.07" diff --git a/venv/lib/python3.10/site-packages/certifi/__main__.py b/venv/lib/python3.10/site-packages/certifi/__main__.py new file mode 100644 index 0000000..8945b5d --- /dev/null +++ b/venv/lib/python3.10/site-packages/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/venv/lib/python3.10/site-packages/certifi/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/certifi/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..e3ffee3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/certifi/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/certifi/__pycache__/__main__.cpython-310.pyc b/venv/lib/python3.10/site-packages/certifi/__pycache__/__main__.cpython-310.pyc new file mode 100644 index 0000000..7206262 Binary files /dev/null and b/venv/lib/python3.10/site-packages/certifi/__pycache__/__main__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/certifi/__pycache__/core.cpython-310.pyc b/venv/lib/python3.10/site-packages/certifi/__pycache__/core.cpython-310.pyc new file mode 100644 index 0000000..f86fb1a Binary files /dev/null and b/venv/lib/python3.10/site-packages/certifi/__pycache__/core.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/certifi/cacert.pem b/venv/lib/python3.10/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..df9e4e3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/certifi/cacert.pem @@ -0,0 +1,4527 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E 
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr 
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: 
d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl 
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: 
"Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy 
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- 
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG 
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT 
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: 
d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- 
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo +q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK +vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: 
CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv 
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym 
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl 
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc 
OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 
55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z 
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze 
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 
Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ 
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE 
WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 
35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB 
+RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk 
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- 
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Subject: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Label: "E-Tugra Global Root CA RSA v3" +# Serial: 75951268308633135324246244059508261641472512052 +# MD5 Fingerprint: 22:be:10:f6:c2:f8:03:88:73:5f:33:29:47:28:47:a4 +# SHA1 Fingerprint: e9:a8:5d:22:14:52:1c:5b:aa:0a:b4:be:24:6a:23:8a:c9:ba:e2:a9 +# SHA256 Fingerprint: ef:66:b0:b1:0a:3c:db:9f:2e:36:48:c7:6b:d2:af:18:ea:d2:bf:e6:f1:17:65:5e:28:c4:06:0d:a1:a3:f4:c2 +-----BEGIN CERTIFICATE----- +MIIF8zCCA9ugAwIBAgIUDU3FzRYilZYIfrgLfxUGNPt5EDQwDQYJKoZIhvcNAQEL +BQAwgYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUt +VHVncmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYw +JAYDVQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIFJTQSB2MzAeFw0yMDAzMTgw +OTA3MTdaFw00NTAzMTIwOTA3MTdaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMG +QW5rYXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1 +Z3JhIFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBD +QSBSU0EgdjMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCiZvCJt3J7 +7gnJY9LTQ91ew6aEOErxjYG7FL1H6EAX8z3DeEVypi6Q3po61CBxyryfHUuXCscx +uj7X/iWpKo429NEvx7epXTPcMHD4QGxLsqYxYdE0PD0xesevxKenhOGXpOhL9hd8 +7jwH7eKKV9y2+/hDJVDqJ4GohryPUkqWOmAalrv9c/SF/YP9f4RtNGx/ardLAQO/ +rWm31zLZ9Vdq6YaCPqVmMbMWPcLzJmAy01IesGykNz709a/r4d+ABs8qQedmCeFL +l+d3vSFtKbZnwy1+7dZ5ZdHPOrbRsV5WYVB6Ws5OUDGAA5hH5+QYfERaxqSzO8bG +wzrwbMOLyKSRBfP12baqBqG3q+Sx6iEUXIOk/P+2UNOMEiaZdnDpwA+mdPy70Bt4 +znKS4iicvObpCdg604nmvi533wEKb5b25Y08TVJ2Glbhc34XrD2tbKNSEhhw5oBO +M/J+JjKsBY04pOZ2PJ8QaQ5tndLBeSBrW88zjdGUdjXnXVXHt6woq0bM5zshtQoK +5EpZ3IE1S0SVEgpnpaH/WwAH0sDM+T/8nzPyAPiMbIedBi3x7+PmBvrFZhNb/FAH +nnGGstpvdDDPk1Po3CLW3iAfYY2jLqN4MpBs3KwytQXk9TwzDdbgh3cXTJ2w2Amo +DVf3RIXwyAS+XF1a4xeOVGNpf0l0ZAWMowIDAQABo2MwYTAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFLK0ruYt9ybVqnUtdkvAG1Mh0EjvMB0GA1UdDgQWBBSy +tK7mLfcm1ap1LXZLwBtTIdBI7zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEL +BQADggIBAImocn+M684uGMQQgC0QDP/7FM0E4BQ8Tpr7nym/Ip5XuYJzEmMmtcyQ +6dIqKe6cLcwsmb5FJ+Sxce3kOJUxQfJ9emN438o2Fi+CiJ+8EUdPdk3ILY7r3y18 +Tjvarvbj2l0Upq7ohUSdBm6O++96SmotKygY/r+QLHUWnw/qln0F7psTpURs+APQ +3SPh/QMSEgj0GDSz4DcLdxEBSL9htLX4GdnLTeqjjO/98Aa1bZL0SmFQhO3sSdPk +vmjmLuMxC1QLGpLWgti2omU8ZgT5Vdps+9u1FGZNlIM7zR6mK7L+d0CGq+ffCsn9 +9t2HVhjYsCxVYJb6CH5SkPVLpi6HfMsg2wY+oF0Dd32iPBMbKaITVaA9FCKvb7jQ +mhty3QUBjYZgv6Rn7rWlDdF/5horYmbDB7rnoEgcOMPpRfunf/ztAmgayncSd6YA +VSgU7NbHEqIbZULpkejLPoeJVF3Zr52XnGnnCv8PWniLYypMfUeUP95L6VPQMPHF +9p5J3zugkaOj/s1YzOrfr28oO6Bpm4/srK4rVJ2bBLFHIK+WEj5jlB0E5y67hscM +moi/dkfv97ALl2bSRM9gUgfh1SxKOidhd8rXj+eHDjD/DLsE4mHDosiXYY60MGo8 +bcIHX0pzLz/5FooBZu+6kcpSV3uu1OYP3Qt6f4ueJiDPO++BcYNZ +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Subject: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. 
OU=E-Tugra Trust Center +# Label: "E-Tugra Global Root CA ECC v3" +# Serial: 218504919822255052842371958738296604628416471745 +# MD5 Fingerprint: 46:bc:81:bb:f1:b5:1e:f7:4b:96:bc:14:e2:e7:27:64 +# SHA1 Fingerprint: 8a:2f:af:57:53:b1:b0:e6:a1:04:ec:5b:6a:69:71:6d:f6:1c:e2:84 +# SHA256 Fingerprint: 87:3f:46:85:fa:7f:56:36:25:25:2e:6d:36:bc:d7:f1:6f:c2:49:51:f2:64:e4:7e:1b:95:4f:49:08:cd:ca:13 +-----BEGIN CERTIFICATE----- +MIICpTCCAiqgAwIBAgIUJkYZdzHhT28oNt45UYbm1JeIIsEwCgYIKoZIzj0EAwMw +gYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUtVHVn +cmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYwJAYD +VQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIEVDQyB2MzAeFw0yMDAzMTgwOTQ2 +NThaFw00NTAzMTIwOTQ2NThaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMGQW5r +YXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1Z3Jh +IFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBDQSBF +Q0MgdjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASOmCm/xxAeJ9urA8woLNheSBkQ +KczLWYHMjLiSF4mDKpL2w6QdTGLVn9agRtwcvHbB40fQWxPa56WzZkjnIZpKT4YK +fWzqTTKACrJ6CZtpS5iB4i7sAnCWH/31Rs7K3IKjYzBhMA8GA1UdEwEB/wQFMAMB +Af8wHwYDVR0jBBgwFoAU/4Ixcj75xGZsrTie0bBRiKWQzPUwHQYDVR0OBBYEFP+C +MXI++cRmbK04ntGwUYilkMz1MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNp +ADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/6 +7W4WAie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFx +vmjkI6TZraE3 +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication RootCA3" +# Serial: 16247922307909811815 +# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 +# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a +# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 +-----BEGIN CERTIFICATE----- +MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV +BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw +JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 +MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg +Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r +CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA +lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG +TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 +9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 +8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 +g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we +GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst ++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M +0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ +T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw +HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS +YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA +FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd +9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI +UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ +OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke 
+gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf +iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV +nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD +2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// +1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad +TdJ0MN1kURXbg4NR16/9M51NZg== +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- diff --git a/venv/lib/python3.10/site-packages/certifi/core.py b/venv/lib/python3.10/site-packages/certifi/core.py new file mode 100644 index 0000000..de02898 --- /dev/null +++ b/venv/lib/python3.10/site-packages/certifi/core.py @@ -0,0 +1,108 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. 
+ _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +elif sys.version_info >= (3, 7): + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") + +else: + import os + import types + from typing import Union + + Package = Union[types.ModuleType, str] + Resource = Union[str, "os.PathLike"] + + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict' + ) -> str: + with open(where(), encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. + def where() -> str: + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/venv/lib/python3.10/site-packages/certifi/py.typed b/venv/lib/python3.10/site-packages/certifi/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/LICENSE b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/LICENSE new file mode 100644 index 0000000..ad82355 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/METADATA new file mode 100644 index 0000000..867b915 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/METADATA @@ -0,0 +1,616 @@ +Metadata-Version: 2.1 +Name: charset-normalizer +Version: 3.1.0 +Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. +Home-page: https://github.com/Ousret/charset_normalizer +Author: Ahmed TAHRI +Author-email: ahmed.tahri@cloudnursery.dev +License: MIT +Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues +Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest +Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Text Processing :: Linguistic +Classifier: Topic :: Utilities +Classifier: Typing :: Typed +Requires-Python: >=3.7.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: unicode_backport + +

+# Charset Detection, for Everyone 👋
+
+The Real First Universal Charset Detector

+> A library that helps you read text from an unknown charset encoding.
+> Motivated by `chardet`, I'm trying to resolve the issue by taking a new approach.
+> All IANA character set names for which the Python core library provides codecs are supported.

+>>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<<

+This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
+
+| Feature                                          | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
+|--------------------------------------------------|:---------------------------------------------:|:------------------:|:-----------------------------------------------:|
+| `Fast`                                           | ❌ | ✅ | ✅ |
+| `Universal**`                                    | ❌ | ✅ | ❌ |
+| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
+| `Reliable` **with** distinguishable standards    | ✅ | ✅ | ✅ |
+| `License`                                        | LGPL-2.1 (_restrictive_) | MIT | MPL-1.1 (_restrictive_) |
+| `Native Python`                                  | ✅ | ✅ | ❌ |
+| `Detect spoken language`                         | ❌ | ✅ | N/A |
+| `UnicodeDecodeError Safety`                      | ❌ | ✅ | ❌ |
+| `Whl Size`                                       | 193.6 kB | 39.5 kB | ~200 kB |
+| `Supported Encoding`                             | 33 | :tada: [90](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |

+*\*\* : They are clearly using specific code for a specific encoding, even if it covers most of the encodings in use.*
+
+Did you get here because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
+
+## ⭐ Your support
+
+*Fork, test it, star it, submit your ideas! We do listen.*
+
+## ⚡ Performance
+
+This package offers better performance than its counterpart Chardet. Here are some numbers.
+
+| Package                                       | Accuracy | Mean per file (ms) | File per sec (est) |
+|-----------------------------------------------|:--------:|:------------------:|:------------------:|
+| [chardet](https://github.com/chardet/chardet) | 86 %     | 200 ms             | 5 file/sec         |
+| charset-normalizer                            | **98 %** | **10 ms**          | 100 file/sec       |
+
+| Package                                       | 99th percentile | 95th percentile | 50th percentile |
+|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
+| [chardet](https://github.com/chardet/chardet) | 1200 ms         | 287 ms          | 23 ms           |
+| charset-normalizer                            | 100 ms          | 50 ms           | 5 ms            |
+
+Chardet's performance on larger files (1MB+) is very poor. Expect a huge difference on large payloads.
+
+> Stats are generated using 400+ files with default parameters. For more details on the files used, see the GHA workflows.
+> And yes, these results might change at any time. The dataset can be updated to include more files.
+> The actual delays depend heavily on your CPU capabilities, but the factors should remain the same.
+> Keep in mind that these stats are generous: Chardet's accuracy versus ours is measured using Chardet's initial capability
+> (e.g. supported encodings). Challenge them if you want.
+
+## ✨ Installation
+
+Using PyPI for the latest stable release:
+```sh
+pip install charset-normalizer -U
+```
+
+## 🚀 Basic Usage
+
+### CLI
+This package comes with a CLI.
+
+```
+usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
+                  file [file ...]
+
+The Real First Universal Charset Detector. Discover originating encoding used
+on text file. Normalize text to unicode.
+
+positional arguments:
+  files                 File(s) to be analysed
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -v, --verbose         Display complementary information about file if any.
+                        Stdout will contain logs about the detection process.
+  -a, --with-alternative
+                        Output complementary possibilities if any. Top-level
+                        JSON WILL be a list.
+  -n, --normalize       Permit to normalize input file. If not set, program
+                        does not write anything.
+  -m, --minimal         Only output the charset detected to STDOUT. Disabling
+                        JSON output.
+  -r, --replace         Replace file when trying to normalize it instead of
+                        creating a new one.
+  -f, --force           Replace file without asking if you are sure, use this
+                        flag with caution.
+  -t THRESHOLD, --threshold THRESHOLD
+                        Define a custom maximum amount of chaos allowed in
+                        decoded content. 0. <= chaos <= 1.
+  --version             Show version information and exit.
+```
+
+```bash
+normalizer ./data/sample.1.fr.srt
+```
+
+:tada: Since version 1.4.0 the CLI produces an easily usable STDOUT result in JSON format.
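+Because the report (sample shown just below) is plain JSON on STDOUT, it can also be consumed programmatically. A minimal sketch — the sample path is made up, and it assumes the `normalizer` entry point installed by this package is on your PATH:
+
+```python
+import json
+import subprocess
+
+# Run the CLI on one file and parse the JSON report it prints to STDOUT.
+completed = subprocess.run(
+    ["normalizer", "./data/sample.1.fr.srt"],
+    capture_output=True,
+    text=True,
+    check=True,
+)
+report = json.loads(completed.stdout)
+print(report["encoding"], report["language"])
+```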
+A sample report looks like this:
+
+```json
+{
+    "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
+    "encoding": "cp1252",
+    "encoding_aliases": [
+        "1252",
+        "windows_1252"
+    ],
+    "alternative_encodings": [
+        "cp1254",
+        "cp1256",
+        "cp1258",
+        "iso8859_14",
+        "iso8859_15",
+        "iso8859_16",
+        "iso8859_3",
+        "iso8859_9",
+        "latin_1",
+        "mbcs"
+    ],
+    "language": "French",
+    "alphabets": [
+        "Basic Latin",
+        "Latin-1 Supplement"
+    ],
+    "has_sig_or_bom": false,
+    "chaos": 0.149,
+    "coherence": 97.152,
+    "unicode_path": null,
+    "is_preferred": true
+}
+```
+
+### Python
+*Just print out normalized text*
+```python
+from charset_normalizer import from_path
+
+results = from_path('./my_subtitle.srt')
+
+print(str(results.best()))
+```
+
+*Upgrade your code without effort*
+```python
+from charset_normalizer import detect
+```
+
+The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) backward-compatible result possible.
+
+See the docs for advanced usage: [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
+
+## 😇 Why
+
+When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a
+reliable alternative using a completely different method. Also! I never back down from a good challenge!
+
+I **don't care** about the **originating charset** encoding, because **two different tables** can
+produce **two identical rendered strings.**
+What I want is to get readable text, the best I can.
+
+In a way, **I'm brute-forcing text decoding.** How cool is that? 😎
+
+Don't confuse the package **ftfy** with charset-normalizer or chardet. ftfy's goal is to repair broken Unicode strings, whereas charset-normalizer's is to convert a raw file in an unknown encoding to Unicode.
+
+## 🍰 How
+
+ - Discard all charset encoding tables that could not fit the binary content.
+ - Measure the noise, or the mess, once the content is opened (by chunks) with a corresponding charset encoding.
+ - Extract the matches with the lowest mess detected.
+ - Additionally, we measure coherence / probe for a language.
+
+**Wait a minute**, what are noise/mess and coherence according to **YOU**?
+
+*Noise:* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
+**I established** some ground rules about **what is obvious** when **it seems like** a mess.
+I know that my interpretation of what is noise is probably incomplete; feel free to contribute in order to
+improve or rewrite it.
+
+*Coherence:* For each language on earth, we have computed ranked letter-appearance occurrences (as best we can). So I thought
+that intel is worth something here, and I use those records against decoded text to check if I can detect intelligent design.
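+To make the discard / measure / rank loop above concrete, here is a minimal, self-contained sketch of the idea — **not** the library's actual internals; `mess_score`, `coherence_score` and `naive_from_bytes` are made-up stand-ins for the real mess detector and coherence probe:
+
+```python
+from collections import Counter
+
+# Hypothetical "expected top letters" for one language; the real package
+# ships precomputed ranked tables for dozens of languages.
+ENGLISH_TOP_LETTERS = set("etaoinshrdlc")
+
+
+def mess_score(text: str) -> float:
+    """Stand-in mess detector: fraction of characters that are neither
+    printable nor whitespace."""
+    bad = sum(1 for ch in text if not (ch.isprintable() or ch.isspace()))
+    return bad / max(len(text), 1)
+
+
+def coherence_score(text: str) -> float:
+    """Stand-in coherence probe: overlap between the text's most frequent
+    letters and a language's expected top letters."""
+    letters = [ch.lower() for ch in text if ch.isalpha()]
+    if not letters:
+        return 0.0
+    top = {ch for ch, _ in Counter(letters).most_common(12)}
+    return len(top & ENGLISH_TOP_LETTERS) / len(ENGLISH_TOP_LETTERS)
+
+
+def naive_from_bytes(payload: bytes, candidates=("utf_8", "cp1252", "latin_1")):
+    """Decode with every candidate code page, discard the ones that cannot
+    fit the bytes, then rank survivors: lowest mess first, highest
+    coherence as the tie-breaker."""
+    ranked = []
+    for codec in candidates:
+        try:
+            text = payload.decode(codec)
+        except (UnicodeDecodeError, LookupError):
+            continue  # this encoding table cannot fit the binary content
+        ranked.append((mess_score(text), -coherence_score(text), codec))
+    ranked.sort()
+    return ranked
+
+
+print(naive_from_bytes("Bсеки човек има право на образование.".encode("utf_8")))
+# cp1252 is discarded (it cannot decode these bytes); latin_1 survives but
+# scores messy (C1 control characters), so utf_8 comes out on top.
+```
+
+The real implementation works on chunks, knows many languages, and applies far subtler mess rules, but the skeleton is the same.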
+## ⚡ Known limitations
+
+ - Language detection is unreliable when the text contains two or more languages sharing identical letters (e.g. HTML (English tags) + Turkish content, both sharing Latin characters).
+ - Every charset detector heavily depends on sufficient content. In common cases, do not bother running detection on very tiny content.
+
+## ⚠️ About Python EOLs
+
+**If you are running:**
+
+- Python >=2.7,<3.5: Unsupported
+- Python 3.5: charset-normalizer < 2.1
+- Python 3.6: charset-normalizer < 3.1
+
+Upgrade your Python interpreter as soon as possible.
+
+## 👤 Contributing
+
+Contributions, issues and feature requests are very much welcome.
+Feel free to check the [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
+
+## 📝 License
+
+Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
    +This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. + +Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) + +## 💼 For Enterprise + +Professional support for charset-normalizer is available as part of the [Tidelift +Subscription][1]. Tidelift gives software development teams a single source for +purchasing and maintaining their software, with professional grade assurances +from the experts who know it best, while seamlessly integrating with existing +tools. + +[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme + +# Changelog +All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06) + +### Added +- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262) + +### Removed +- Support for Python 3.6 (PR #260) + +### Changed +- Optional speedup provided by mypy/c 1.0.1 + +## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18) + +### Fixed +- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233) + +### Changed +- Speedup provided by mypy/c 0.990 on Python >= 3.7 + +## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20) + +### Added +- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results +- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES +- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio +- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) + +### Changed +- Build with static metadata using 'build' frontend +- Make the language detection stricter +- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 + +### Fixed +- CLI with opt --normalize fail when using full path for files +- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it +- Sphinx warnings when generating the documentation + +### Removed +- Coherence detector no longer return 'Simple English' instead return 'English' +- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' +- Breaking: Method `first()` and `best()` from CharsetMatch +- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) +- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches +- Breaking: Top-level function `normalize` +- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch +- Support for the backport `unicodedata2` + +## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18) + +### Added +- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results +- Support for alternative language frequency set 
in charset_normalizer.assets.FREQUENCIES +- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio + +### Changed +- Build with static metadata using 'build' frontend +- Make the language detection stricter + +### Fixed +- CLI with opt --normalize fail when using full path for files +- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it + +### Removed +- Coherence detector no longer return 'Simple English' instead return 'English' +- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' + +## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21) + +### Added +- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) + +### Removed +- Breaking: Method `first()` and `best()` from CharsetMatch +- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) + +### Fixed +- Sphinx warnings when generating the documentation + +## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15) + +### Changed +- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 + +### Removed +- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches +- Breaking: Top-level function `normalize` +- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch +- Support for the backport `unicodedata2` + +## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19) + +### Deprecated +- Function `normalize` scheduled for removal in 3.0 + +### Changed +- Removed useless call to decode in fn is_unprintable (#206) + +### Fixed +- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204) + +## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19) + +### Added +- Output the Unicode table version when running the CLI with `--version` (PR #194) + +### Changed +- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175) +- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183) + +### Fixed +- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175) +- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181) + +### Removed +- Support for Python 3.5 (PR #192) + +### Deprecated +- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194) + +## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12) + +### Fixed +- ASCII miss-detection on rare cases (PR #170) + +## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30) + +### Added +- Explicit support for Python 3.11 (PR #164) + +### Changed +- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165) + +## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04) + +### Fixed +- 
Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154) + +### Changed +- Skipping the language-detection (CD) on ASCII (PR #155) + +## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03) + +### Changed +- Moderating the logging impact (since 2.0.8) for specific environments (PR #147) + +### Fixed +- Wrong logging level applied when setting kwarg `explain` to True (PR #146) + +## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24) +### Changed +- Improvement over Vietnamese detection (PR #126) +- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124) +- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122) +- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129) +- Code style as refactored by Sourcery-AI (PR #131) +- Minor adjustment on the MD around european words (PR #133) +- Remove and replace SRTs from assets / tests (PR #139) +- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135) +- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135) + +### Fixed +- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137) +- Avoid using too insignificant chunk (PR #137) + +### Added +- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135) +- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141) + +## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11) +### Added +- Add support for Kazakh (Cyrillic) language detection (PR #109) + +### Changed +- Further, improve inferring the language from a given single-byte code page (PR #112) +- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116) +- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113) +- Various detection improvement (MD+CD) (PR #117) + +### Removed +- Remove redundant logging entry about detected language(s) (PR #115) + +### Fixed +- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102) + +## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18) +### Fixed +- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100) +- Fix CLI crash when using --minimal output in certain cases (PR #103) + +### Changed +- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101) + +## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14) +### Changed +- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81) +- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82) +- The Unicode detection is slightly improved (PR #93) +- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91) + +### Removed +- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92) + +### Fixed +- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could 
mislead the mess detection (PR #95) +- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96) +- The MANIFEST.in was not exhaustive (PR #78) + +## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30) +### Fixed +- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70) +- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68) +- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72) +- Submatch factoring could be wrong in rare edge cases (PR #72) +- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72) +- Fix line endings from CRLF to LF for certain project files (PR #67) + +### Changed +- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76) +- Allow fallback on specified encoding if any (PR #71) + +## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16) +### Changed +- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63) +- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64) + +## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15) +### Fixed +- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59) + +### Changed +- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57) + +## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13) +### Fixed +- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55) +- Using explain=False permanently disable the verbose output in the current runtime (PR #47) +- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47) +- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52) + +### Changed +- Public function normalize default args values were not aligned with from_bytes (PR #53) + +### Added +- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47) + +## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02) +### Changed +- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet. +- Accent has been made on UTF-8 detection, should perform rather instantaneous. +- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible. +- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time) +- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+ +- utf_7 detection has been reinstated. + +### Removed +- This package no longer require anything when used with Python 3.5 (Dropped cached_property) +- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian. +- The exception hook on UnicodeDecodeError has been removed. 
+ +### Deprecated +- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0 + +### Fixed +- The CLI output used the relative path of the file(s). Should be absolute. + +## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28) +### Fixed +- Logger configuration/usage no longer conflict with others (PR #44) + +## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21) +### Removed +- Using standard logging instead of using the package loguru. +- Dropping nose test framework in favor of the maintained pytest. +- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text. +- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version. +- Stop support for UTF-7 that does not contain a SIG. +- Dropping PrettyTable, replaced with pure JSON output in CLI. + +### Fixed +- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process. +- Not searching properly for the BOM when trying utf32/16 parent codec. + +### Changed +- Improving the package final size by compressing frequencies.json. +- Huge improvement over the larges payload. + +### Added +- CLI now produces JSON consumable output. +- Return ASCII if given sequences fit. Given reasonable confidence. + +## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13) + +### Fixed +- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40) + +## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12) + +### Fixed +- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39) + +## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12) + +### Fixed +- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38) + +## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09) + +### Changed +- Amend the previous release to allow prettytable 2.0 (PR #35) + +## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08) + +### Fixed +- Fix error while using the package with a python pre-release interpreter (PR #33) + +### Changed +- Dependencies refactoring, constraints revised. + +### Added +- Add python 3.9 and 3.10 to the supported interpreters + +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/RECORD new file mode 100644 index 0000000..fe685e9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/RECORD @@ -0,0 +1,35 @@ +../../../bin/normalizer,sha256=gNjvsdbRtEzoXE0CcHWgw6rjcjfwMp6ZUfRO5nJNkxs,275 +charset_normalizer-3.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +charset_normalizer-3.1.0.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 +charset_normalizer-3.1.0.dist-info/METADATA,sha256=8lfcrrmtfEq--eZqh8FJzEjptLCEoGXySKruxIms44I,30983 +charset_normalizer-3.1.0.dist-info/RECORD,, +charset_normalizer-3.1.0.dist-info/WHEEL,sha256=nKSwEH5fkxvG0Vdj1Hx7vbuU-SGQ9Nxl4yFFsCilvhs,152 +charset_normalizer-3.1.0.dist-info/entry_points.txt,sha256=uYo8aIGLWv8YgWfSna5HnfY_En4pkF1w4bgawNAXzP0,76 +charset_normalizer-3.1.0.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 +charset_normalizer/__init__.py,sha256=aAb_F9Zb23pb4NO6TfIfqLXLvf1PjnLBBOuPvQwPA18,1549 +charset_normalizer/__pycache__/__init__.cpython-310.pyc,, +charset_normalizer/__pycache__/api.cpython-310.pyc,, +charset_normalizer/__pycache__/cd.cpython-310.pyc,, +charset_normalizer/__pycache__/constant.cpython-310.pyc,, +charset_normalizer/__pycache__/legacy.cpython-310.pyc,, +charset_normalizer/__pycache__/md.cpython-310.pyc,, +charset_normalizer/__pycache__/models.cpython-310.pyc,, +charset_normalizer/__pycache__/utils.cpython-310.pyc,, +charset_normalizer/__pycache__/version.cpython-310.pyc,, +charset_normalizer/api.py,sha256=Vh44rFXztkxCjW7gF2waq8TyRL3mXKX8RwNGB99bhb4,18624 +charset_normalizer/assets/__init__.py,sha256=wpRfujN7GJuEE5wHHo3wEDVoJ5ovzRIxsImyimCBfGU,20069 +charset_normalizer/assets/__pycache__/__init__.cpython-310.pyc,, +charset_normalizer/cd.py,sha256=mZuiTSKq4XpweSDD2H4T4R3Axtaa-QS0tpEWdpMuAzQ,12554 +charset_normalizer/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc,, +charset_normalizer/cli/__pycache__/normalizer.cpython-310.pyc,, +charset_normalizer/cli/normalizer.py,sha256=2F-xURZJzo063Ye-2RLJ2wcmURpbKeAzKwpiws65dAs,9744 +charset_normalizer/constant.py,sha256=PmCeoKXqq3ZbCtCUpKHwwFBIv9DXMT_an1yd24q28mA,19101 +charset_normalizer/legacy.py,sha256=T-QuVMsMeDiQEk8WSszMrzVJg_14AMeSkmHdRYhdl1k,2071 +charset_normalizer/md.cpython-310-x86_64-linux-gnu.so,sha256=VRykbQIynSswzdrRbruysjgWzNW9fAXVUn06wgATyhc,17496 +charset_normalizer/md.py,sha256=MXPKP_oLbsubulEL_1rxcYKSd5FeEfyEfNNm5O6ADpc,18258 +charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so,sha256=r7AOFco7yQOyP83eUuE0S1s6UENwSuxbZDmRJzWrEHQ,424312 +charset_normalizer/models.py,sha256=mC11wo84l00u2o03TRNX7M5ItBAbPUKKXgJSFxA35GY,11492 +charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer/utils.py,sha256=tKLpquPYQdaRdFRwBo5gPOi06ov8UCJy5X1Pti0Q78U,11544 +charset_normalizer/version.py,sha256=bekbdpF_D3BtF-PhbPnA9PNaZaI8kKIgl3LTCD5FmYk,79 diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/WHEEL new 
file mode 100644 index 0000000..a9630e4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/entry_points.txt new file mode 100644 index 0000000..a06d360 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +normalizer = charset_normalizer.cli.normalizer:cli_detect diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/top_level.txt new file mode 100644 index 0000000..66958f0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer-3.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +charset_normalizer diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__init__.py b/venv/lib/python3.10/site-packages/charset_normalizer/__init__.py new file mode 100644 index 0000000..ebb5da8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer/__init__.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +""" +Charset-Normalizer +~~~~~~~~~~~~~~ +The Real First Universal Charset Detector. +A library that helps you read text from an unknown charset encoding. +Motivated by chardet, This package is trying to resolve the issue by taking a new approach. +All IANA character set names for which the Python core library provides codecs are supported. + +Basic usage: + >>> from charset_normalizer import from_bytes + >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) + >>> best_guess = results.best() + >>> str(best_guess) + 'Bсеки човек има право на образование. Oбразованието!' + +Others methods and usages are available - see the full documentation +at . +:copyright: (c) 2021 by Ahmed TAHRI +:license: MIT, see LICENSE for more details. 
+""" +import logging + +from .api import from_bytes, from_fp, from_path +from .legacy import detect +from .models import CharsetMatch, CharsetMatches +from .utils import set_logging_handler +from .version import VERSION, __version__ + +__all__ = ( + "from_fp", + "from_path", + "from_bytes", + "detect", + "CharsetMatch", + "CharsetMatches", + "__version__", + "VERSION", + "set_logging_handler", +) + +# Attach a NullHandler to the top level logger by default +# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library + +logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..2c778bd Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/api.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/api.cpython-310.pyc new file mode 100644 index 0000000..2b28a67 Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/api.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/cd.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/cd.cpython-310.pyc new file mode 100644 index 0000000..e75f402 Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/cd.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/constant.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/constant.cpython-310.pyc new file mode 100644 index 0000000..3864bd3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/constant.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc new file mode 100644 index 0000000..619f611 Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc new file mode 100644 index 0000000..60c2fbb Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/models.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/models.cpython-310.pyc new file mode 100644 index 0000000..5688003 Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/models.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000..0340322 Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/utils.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc new file mode 100644 index 0000000..3979575 Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/api.py b/venv/lib/python3.10/site-packages/charset_normalizer/api.py new file mode 100644 index 0000000..9dbf420 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer/api.py @@ -0,0 +1,554 @@ +import logging +from os import PathLike +from typing import Any, BinaryIO, List, Optional, Set + +from .cd import ( + coherence_ratio, + encoding_languages, + mb_encoding_languages, + merge_coherence_ratios, +) +from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE +from .md import mess_ratio +from .models import CharsetMatch, CharsetMatches +from .utils import ( + any_specified_encoding, + cut_sequence_chunks, + iana_name, + identify_sig_or_bom, + is_cp_similar, + is_multi_byte_encoding, + should_strip_sig_or_bom, +) + +# Will most likely be controversial +# logging.addLevelName(TRACE, "TRACE") +logger = logging.getLogger("charset_normalizer") +explain_handler = logging.StreamHandler() +explain_handler.setFormatter( + logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") +) + + +def from_bytes( + sequences: bytes, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.2, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, +) -> CharsetMatches: + """ + Given a raw bytes sequence, return the best possible charsets usable to render str objects. + If there are no results, it is a strong indicator that the source is binary/not text. + By default, the process will extract 5 blocks of 512 bytes each to assess the mess and coherence of a given sequence, + and will give up on a particular code page after 20% of measured mess. Those criteria are customizable at will. + + The preemptive behavior DOES NOT replace the traditional detection workflow; it prioritizes a particular code page + but never takes it for granted. It can improve performance. + + You may want to focus on some code pages and/or exclude others; use cp_isolation and cp_exclusion for that + purpose. + + This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. + By default the library does not set up any handler other than the NullHandler; if you set the 'explain' + toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. + Custom logging format and handler can be set manually.
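A minimal sketch of those knobs in use (sample bytes and values are illustrative, not prescriptive):

    from charset_normalizer import from_bytes

    matches = from_bytes(
        "中文".encode("utf_8"),  # arbitrary sample payload
        steps=5,  # number of blocks to probe
        chunk_size=512,  # bytes per probed block
        threshold=0.2,  # give up on a code page past 20% measured mess
        cp_isolation=["utf_8", "gb18030"],  # restrict testing to these code pages
        explain=True,  # temporarily attach the debugging StreamHandler
    )
    print(matches.best())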
+ """ + + if not isinstance(sequences, (bytearray, bytes)): + raise TypeError( + "Expected object of type bytes or bytearray, got: {0}".format( + type(sequences) + ) + ) + + if explain: + previous_logger_level: int = logger.level + logger.addHandler(explain_handler) + logger.setLevel(TRACE) + + length: int = len(sequences) + + if length == 0: + logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level or logging.WARNING) + return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) + + if cp_isolation is not None: + logger.log( + TRACE, + "cp_isolation is set. use this flag for debugging purpose. " + "limited list of encoding allowed : %s.", + ", ".join(cp_isolation), + ) + cp_isolation = [iana_name(cp, False) for cp in cp_isolation] + else: + cp_isolation = [] + + if cp_exclusion is not None: + logger.log( + TRACE, + "cp_exclusion is set. use this flag for debugging purpose. " + "limited list of encoding excluded : %s.", + ", ".join(cp_exclusion), + ) + cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] + else: + cp_exclusion = [] + + if length <= (chunk_size * steps): + logger.log( + TRACE, + "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", + steps, + chunk_size, + length, + ) + steps = 1 + chunk_size = length + + if steps > 1 and length / steps < chunk_size: + chunk_size = int(length / steps) + + is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE + is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE + + if is_too_small_sequence: + logger.log( + TRACE, + "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( + length + ), + ) + elif is_too_large_sequence: + logger.log( + TRACE, + "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( + length + ), + ) + + prioritized_encodings: List[str] = [] + + specified_encoding: Optional[str] = ( + any_specified_encoding(sequences) if preemptive_behaviour else None + ) + + if specified_encoding is not None: + prioritized_encodings.append(specified_encoding) + logger.log( + TRACE, + "Detected declarative mark in sequence. Priority +1 given for %s.", + specified_encoding, + ) + + tested: Set[str] = set() + tested_but_hard_failure: List[str] = [] + tested_but_soft_failure: List[str] = [] + + fallback_ascii: Optional[CharsetMatch] = None + fallback_u8: Optional[CharsetMatch] = None + fallback_specified: Optional[CharsetMatch] = None + + results: CharsetMatches = CharsetMatches() + + sig_encoding, sig_payload = identify_sig_or_bom(sequences) + + if sig_encoding is not None: + prioritized_encodings.append(sig_encoding) + logger.log( + TRACE, + "Detected a SIG or BOM mark on first %i byte(s). 
Priority +1 given for %s.", + len(sig_payload), + sig_encoding, + ) + + prioritized_encodings.append("ascii") + + if "utf_8" not in prioritized_encodings: + prioritized_encodings.append("utf_8") + + for encoding_iana in prioritized_encodings + IANA_SUPPORTED: + if cp_isolation and encoding_iana not in cp_isolation: + continue + + if cp_exclusion and encoding_iana in cp_exclusion: + continue + + if encoding_iana in tested: + continue + + tested.add(encoding_iana) + + decoded_payload: Optional[str] = None + bom_or_sig_available: bool = sig_encoding == encoding_iana + strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( + encoding_iana + ) + + if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", + encoding_iana, + ) + continue + if encoding_iana in {"utf_7"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.", + encoding_iana, + ) + continue + + try: + is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) + except (ModuleNotFoundError, ImportError): + logger.log( + TRACE, + "Encoding %s does not provide an IncrementalDecoder", + encoding_iana, + ) + continue + + try: + if is_too_large_sequence and is_multi_byte_decoder is False: + str( + sequences[: int(50e4)] + if strip_sig_or_bom is False + else sequences[len(sig_payload) : int(50e4)], + encoding=encoding_iana, + ) + else: + decoded_payload = str( + sequences + if strip_sig_or_bom is False + else sequences[len(sig_payload) :], + encoding=encoding_iana, + ) + except (UnicodeDecodeError, LookupError) as e: + if not isinstance(e, LookupError): + logger.log( + TRACE, + "Code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + similar_soft_failure_test: bool = False + + for encoding_soft_failed in tested_but_soft_failure: + if is_cp_similar(encoding_iana, encoding_soft_failed): + similar_soft_failure_test = True + break + + if similar_soft_failure_test: + logger.log( + TRACE, + "%s is deemed too similar to code page %s and was consider unsuited already. 
Continuing!", + encoding_iana, + encoding_soft_failed, + ) + continue + + r_ = range( + 0 if not bom_or_sig_available else len(sig_payload), + length, + int(length / steps), + ) + + multi_byte_bonus: bool = ( + is_multi_byte_decoder + and decoded_payload is not None + and len(decoded_payload) < length + ) + + if multi_byte_bonus: + logger.log( + TRACE, + "Code page %s is a multi byte encoding table and it appear that at least one character " + "was encoded using n-bytes.", + encoding_iana, + ) + + max_chunk_gave_up: int = int(len(r_) / 4) + + max_chunk_gave_up = max(max_chunk_gave_up, 2) + early_stop_count: int = 0 + lazy_str_hard_failure = False + + md_chunks: List[str] = [] + md_ratios = [] + + try: + for chunk in cut_sequence_chunks( + sequences, + encoding_iana, + r_, + chunk_size, + bom_or_sig_available, + strip_sig_or_bom, + sig_payload, + is_multi_byte_decoder, + decoded_payload, + ): + md_chunks.append(chunk) + + md_ratios.append( + mess_ratio( + chunk, + threshold, + explain is True and 1 <= len(cp_isolation) <= 2, + ) + ) + + if md_ratios[-1] >= threshold: + early_stop_count += 1 + + if (early_stop_count >= max_chunk_gave_up) or ( + bom_or_sig_available and strip_sig_or_bom is False + ): + break + except ( + UnicodeDecodeError + ) as e: # Lazy str loading may have missed something there + logger.log( + TRACE, + "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + early_stop_count = max_chunk_gave_up + lazy_str_hard_failure = True + + # We might want to check the sequence again with the whole content + # Only if initial MD tests passes + if ( + not lazy_str_hard_failure + and is_too_large_sequence + and not is_multi_byte_decoder + ): + try: + sequences[int(50e3) :].decode(encoding_iana, errors="strict") + except UnicodeDecodeError as e: + logger.log( + TRACE, + "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 + if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: + tested_but_soft_failure.append(encoding_iana) + logger.log( + TRACE, + "%s was excluded because of initial chaos probing. Gave up %i time(s). " + "Computed mean chaos is %f %%.", + encoding_iana, + early_stop_count, + round(mean_mess_ratio * 100, ndigits=3), + ) + # Preparing those fallbacks in case we got nothing. + if ( + encoding_iana in ["ascii", "utf_8", specified_encoding] + and not lazy_str_hard_failure + ): + fallback_entry = CharsetMatch( + sequences, encoding_iana, threshold, False, [], decoded_payload + ) + if encoding_iana == specified_encoding: + fallback_specified = fallback_entry + elif encoding_iana == "ascii": + fallback_ascii = fallback_entry + else: + fallback_u8 = fallback_entry + continue + + logger.log( + TRACE, + "%s passed initial chaos probing. 
Mean measured chaos is %f %%", + encoding_iana, + round(mean_mess_ratio * 100, ndigits=3), + ) + + if not is_multi_byte_decoder: + target_languages: List[str] = encoding_languages(encoding_iana) + else: + target_languages = mb_encoding_languages(encoding_iana) + + if target_languages: + logger.log( + TRACE, + "{} should target any language(s) of {}".format( + encoding_iana, str(target_languages) + ), + ) + + cd_ratios = [] + + # We shall skip the CD when its about ASCII + # Most of the time its not relevant to run "language-detection" on it. + if encoding_iana != "ascii": + for chunk in md_chunks: + chunk_languages = coherence_ratio( + chunk, + language_threshold, + ",".join(target_languages) if target_languages else None, + ) + + cd_ratios.append(chunk_languages) + + cd_ratios_merged = merge_coherence_ratios(cd_ratios) + + if cd_ratios_merged: + logger.log( + TRACE, + "We detected language {} using {}".format( + cd_ratios_merged, encoding_iana + ), + ) + + results.append( + CharsetMatch( + sequences, + encoding_iana, + mean_mess_ratio, + bom_or_sig_available, + cd_ratios_merged, + decoded_payload, + ) + ) + + if ( + encoding_iana in [specified_encoding, "ascii", "utf_8"] + and mean_mess_ratio < 0.1 + ): + logger.debug( + "Encoding detection: %s is most likely the one.", encoding_iana + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if encoding_iana == sig_encoding: + logger.debug( + "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " + "the beginning of the sequence.", + encoding_iana, + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if len(results) == 0: + if fallback_u8 or fallback_ascii or fallback_specified: + logger.log( + TRACE, + "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.", + ) + + if fallback_specified: + logger.debug( + "Encoding detection: %s will be used as a fallback match", + fallback_specified.encoding, + ) + results.append(fallback_specified) + elif ( + (fallback_u8 and fallback_ascii is None) + or ( + fallback_u8 + and fallback_ascii + and fallback_u8.fingerprint != fallback_ascii.fingerprint + ) + or (fallback_u8 is not None) + ): + logger.debug("Encoding detection: utf_8 will be used as a fallback match") + results.append(fallback_u8) + elif fallback_ascii: + logger.debug("Encoding detection: ascii will be used as a fallback match") + results.append(fallback_ascii) + + if results: + logger.debug( + "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.", + results.best().encoding, # type: ignore + len(results) - 1, + ) + else: + logger.debug("Encoding detection: Unable to determine any suitable charset.") + + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + + return results + + +def from_fp( + fp: BinaryIO, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but using a file pointer that is already ready. + Will not close the file pointer. 
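A short sketch of from_fp on an in-memory binary stream (BytesIO stands in for a real file handle):

    from io import BytesIO
    from charset_normalizer import from_fp

    fp = BytesIO("héllo wörld".encode("utf_8"))
    print(from_fp(fp).best())
    assert not fp.closed  # as documented, the file pointer is left open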
+ """ + return from_bytes( + fp.read(), + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + language_threshold, + ) + + +def from_path( + path: "PathLike[Any]", + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. + Can raise IOError. + """ + with open(path, "rb") as fp: + return from_fp( + fp, + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + language_threshold, + ) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/assets/__init__.py b/venv/lib/python3.10/site-packages/charset_normalizer/assets/__init__.py new file mode 100644 index 0000000..9075930 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer/assets/__init__.py @@ -0,0 +1,1440 @@ +# -*- coding: utf-8 -*- +from typing import Dict, List + +# Language label that contain the em dash "—" +# character are to be considered alternative seq to origin +FREQUENCIES: Dict[str, List[str]] = { + "English": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "u", + "m", + "f", + "p", + "g", + "w", + "y", + "b", + "v", + "k", + "x", + "j", + "z", + "q", + ], + "English—": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "m", + "u", + "f", + "p", + "g", + "w", + "b", + "y", + "v", + "k", + "j", + "x", + "z", + "q", + ], + "German": [ + "e", + "n", + "i", + "r", + "s", + "t", + "a", + "d", + "h", + "u", + "l", + "g", + "o", + "c", + "m", + "b", + "f", + "k", + "w", + "z", + "p", + "v", + "ü", + "ä", + "ö", + "j", + ], + "French": [ + "e", + "a", + "s", + "n", + "i", + "t", + "r", + "l", + "u", + "o", + "d", + "c", + "p", + "m", + "é", + "v", + "g", + "f", + "b", + "h", + "q", + "à", + "x", + "è", + "y", + "j", + ], + "Dutch": [ + "e", + "n", + "a", + "i", + "r", + "t", + "o", + "d", + "s", + "l", + "g", + "h", + "v", + "m", + "u", + "k", + "c", + "p", + "b", + "w", + "j", + "z", + "f", + "y", + "x", + "ë", + ], + "Italian": [ + "e", + "i", + "a", + "o", + "n", + "l", + "t", + "r", + "s", + "c", + "d", + "u", + "p", + "m", + "g", + "v", + "f", + "b", + "z", + "h", + "q", + "è", + "à", + "k", + "y", + "ò", + ], + "Polish": [ + "a", + "i", + "o", + "e", + "n", + "r", + "z", + "w", + "s", + "c", + "t", + "k", + "y", + "d", + "p", + "m", + "u", + "l", + "j", + "ł", + "g", + "b", + "h", + "ą", + "ę", + "ó", + ], + "Spanish": [ + "e", + "a", + "o", + "n", + "s", + "r", + "i", + "l", + "d", + "t", + "c", + "u", + "m", + "p", + "b", + "g", + "v", + "f", + "y", + "ó", + "h", + "q", + "í", + "j", + "z", + "á", + ], + "Russian": [ + "о", + "а", + "е", + "и", + "н", + "с", + "т", + "р", + "в", + "л", + "к", + "м", + "д", + "п", + "у", + "г", + "я", + "ы", + "з", + "б", + "й", + "ь", + "ч", + "х", + "ж", + "ц", + ], + # Jap-Kanji + "Japanese": [ + "人", + "一", + "大", + "亅", + "丁", + "丨", + "竹", + "笑", + "口", + "日", + "今", + "二", + "彳", + "行", + "十", + "土", + "丶", + "寸", + "寺", + "時", + "乙", + "丿", + "乂", + "气", + "気", + "冂", + "巾", + "亠", + "市", + "目", + "儿", + "見", + "八", + "小", + "凵", + "県", + "月", + "彐", + "門", + "間", + "木", + "東", + "山", + "出", + "本", + "中", + "刀", + "分", + "耳", + "又", + "取", + "最", + 
"言", + "田", + "心", + "思", + "刂", + "前", + "京", + "尹", + "事", + "生", + "厶", + "云", + "会", + "未", + "来", + "白", + "冫", + "楽", + "灬", + "馬", + "尸", + "尺", + "駅", + "明", + "耂", + "者", + "了", + "阝", + "都", + "高", + "卜", + "占", + "厂", + "广", + "店", + "子", + "申", + "奄", + "亻", + "俺", + "上", + "方", + "冖", + "学", + "衣", + "艮", + "食", + "自", + ], + # Jap-Katakana + "Japanese—": [ + "ー", + "ン", + "ス", + "・", + "ル", + "ト", + "リ", + "イ", + "ア", + "ラ", + "ッ", + "ク", + "ド", + "シ", + "レ", + "ジ", + "タ", + "フ", + "ロ", + "カ", + "テ", + "マ", + "ィ", + "グ", + "バ", + "ム", + "プ", + "オ", + "コ", + "デ", + "ニ", + "ウ", + "メ", + "サ", + "ビ", + "ナ", + "ブ", + "ャ", + "エ", + "ュ", + "チ", + "キ", + "ズ", + "ダ", + "パ", + "ミ", + "ェ", + "ョ", + "ハ", + "セ", + "ベ", + "ガ", + "モ", + "ツ", + "ネ", + "ボ", + "ソ", + "ノ", + "ァ", + "ヴ", + "ワ", + "ポ", + "ペ", + "ピ", + "ケ", + "ゴ", + "ギ", + "ザ", + "ホ", + "ゲ", + "ォ", + "ヤ", + "ヒ", + "ユ", + "ヨ", + "ヘ", + "ゼ", + "ヌ", + "ゥ", + "ゾ", + "ヶ", + "ヂ", + "ヲ", + "ヅ", + "ヵ", + "ヱ", + "ヰ", + "ヮ", + "ヽ", + "゠", + "ヾ", + "ヷ", + "ヿ", + "ヸ", + "ヹ", + "ヺ", + ], + # Jap-Hiragana + "Japanese——": [ + "の", + "に", + "る", + "た", + "と", + "は", + "し", + "い", + "を", + "で", + "て", + "が", + "な", + "れ", + "か", + "ら", + "さ", + "っ", + "り", + "す", + "あ", + "も", + "こ", + "ま", + "う", + "く", + "よ", + "き", + "ん", + "め", + "お", + "け", + "そ", + "つ", + "だ", + "や", + "え", + "ど", + "わ", + "ち", + "み", + "せ", + "じ", + "ば", + "へ", + "び", + "ず", + "ろ", + "ほ", + "げ", + "む", + "べ", + "ひ", + "ょ", + "ゆ", + "ぶ", + "ご", + "ゃ", + "ね", + "ふ", + "ぐ", + "ぎ", + "ぼ", + "ゅ", + "づ", + "ざ", + "ぞ", + "ぬ", + "ぜ", + "ぱ", + "ぽ", + "ぷ", + "ぴ", + "ぃ", + "ぁ", + "ぇ", + "ぺ", + "ゞ", + "ぢ", + "ぉ", + "ぅ", + "ゐ", + "ゝ", + "ゑ", + "゛", + "゜", + "ゎ", + "ゔ", + "゚", + "ゟ", + "゙", + "ゕ", + "ゖ", + ], + "Portuguese": [ + "a", + "e", + "o", + "s", + "i", + "r", + "d", + "n", + "t", + "m", + "u", + "c", + "l", + "p", + "g", + "v", + "b", + "f", + "h", + "ã", + "q", + "é", + "ç", + "á", + "z", + "í", + ], + "Swedish": [ + "e", + "a", + "n", + "r", + "t", + "s", + "i", + "l", + "d", + "o", + "m", + "k", + "g", + "v", + "h", + "f", + "u", + "p", + "ä", + "c", + "b", + "ö", + "å", + "y", + "j", + "x", + ], + "Chinese": [ + "的", + "一", + "是", + "不", + "了", + "在", + "人", + "有", + "我", + "他", + "这", + "个", + "们", + "中", + "来", + "上", + "大", + "为", + "和", + "国", + "地", + "到", + "以", + "说", + "时", + "要", + "就", + "出", + "会", + "可", + "也", + "你", + "对", + "生", + "能", + "而", + "子", + "那", + "得", + "于", + "着", + "下", + "自", + "之", + "年", + "过", + "发", + "后", + "作", + "里", + "用", + "道", + "行", + "所", + "然", + "家", + "种", + "事", + "成", + "方", + "多", + "经", + "么", + "去", + "法", + "学", + "如", + "都", + "同", + "现", + "当", + "没", + "动", + "面", + "起", + "看", + "定", + "天", + "分", + "还", + "进", + "好", + "小", + "部", + "其", + "些", + "主", + "样", + "理", + "心", + "她", + "本", + "前", + "开", + "但", + "因", + "只", + "从", + "想", + "实", + ], + "Ukrainian": [ + "о", + "а", + "н", + "і", + "и", + "р", + "в", + "т", + "е", + "с", + "к", + "л", + "у", + "д", + "м", + "п", + "з", + "я", + "ь", + "б", + "г", + "й", + "ч", + "х", + "ц", + "ї", + ], + "Norwegian": [ + "e", + "r", + "n", + "t", + "a", + "s", + "i", + "o", + "l", + "d", + "g", + "k", + "m", + "v", + "f", + "p", + "u", + "b", + "h", + "å", + "y", + "j", + "ø", + "c", + "æ", + "w", + ], + "Finnish": [ + "a", + "i", + "n", + "t", + "e", + "s", + "l", + "o", + "u", + "k", + "ä", + "m", + "r", + "v", + "j", + "h", + "p", + "y", + "d", + "ö", + "g", + "c", + "b", + "f", + "w", + "z", + ], + "Vietnamese": [ + "n", + "h", + "t", + "i", + "c", + "g", + "a", + "o", + 
"u", + "m", + "l", + "r", + "à", + "đ", + "s", + "e", + "v", + "p", + "b", + "y", + "ư", + "d", + "á", + "k", + "ộ", + "ế", + ], + "Czech": [ + "o", + "e", + "a", + "n", + "t", + "s", + "i", + "l", + "v", + "r", + "k", + "d", + "u", + "m", + "p", + "í", + "c", + "h", + "z", + "á", + "y", + "j", + "b", + "ě", + "é", + "ř", + ], + "Hungarian": [ + "e", + "a", + "t", + "l", + "s", + "n", + "k", + "r", + "i", + "o", + "z", + "á", + "é", + "g", + "m", + "b", + "y", + "v", + "d", + "h", + "u", + "p", + "j", + "ö", + "f", + "c", + ], + "Korean": [ + "이", + "다", + "에", + "의", + "는", + "로", + "하", + "을", + "가", + "고", + "지", + "서", + "한", + "은", + "기", + "으", + "년", + "대", + "사", + "시", + "를", + "리", + "도", + "인", + "스", + "일", + ], + "Indonesian": [ + "a", + "n", + "e", + "i", + "r", + "t", + "u", + "s", + "d", + "k", + "m", + "l", + "g", + "p", + "b", + "o", + "h", + "y", + "j", + "c", + "w", + "f", + "v", + "z", + "x", + "q", + ], + "Turkish": [ + "a", + "e", + "i", + "n", + "r", + "l", + "ı", + "k", + "d", + "t", + "s", + "m", + "y", + "u", + "o", + "b", + "ü", + "ş", + "v", + "g", + "z", + "h", + "c", + "p", + "ç", + "ğ", + ], + "Romanian": [ + "e", + "i", + "a", + "r", + "n", + "t", + "u", + "l", + "o", + "c", + "s", + "d", + "p", + "m", + "ă", + "f", + "v", + "î", + "g", + "b", + "ș", + "ț", + "z", + "h", + "â", + "j", + ], + "Farsi": [ + "ا", + "ی", + "ر", + "د", + "ن", + "ه", + "و", + "م", + "ت", + "ب", + "س", + "ل", + "ک", + "ش", + "ز", + "ف", + "گ", + "ع", + "خ", + "ق", + "ج", + "آ", + "پ", + "ح", + "ط", + "ص", + ], + "Arabic": [ + "ا", + "ل", + "ي", + "م", + "و", + "ن", + "ر", + "ت", + "ب", + "ة", + "ع", + "د", + "س", + "ف", + "ه", + "ك", + "ق", + "أ", + "ح", + "ج", + "ش", + "ط", + "ص", + "ى", + "خ", + "إ", + ], + "Danish": [ + "e", + "r", + "n", + "t", + "a", + "i", + "s", + "d", + "l", + "o", + "g", + "m", + "k", + "f", + "v", + "u", + "b", + "h", + "p", + "å", + "y", + "ø", + "æ", + "c", + "j", + "w", + ], + "Serbian": [ + "а", + "и", + "о", + "е", + "н", + "р", + "с", + "у", + "т", + "к", + "ј", + "в", + "д", + "м", + "п", + "л", + "г", + "з", + "б", + "a", + "i", + "e", + "o", + "n", + "ц", + "ш", + ], + "Lithuanian": [ + "i", + "a", + "s", + "o", + "r", + "e", + "t", + "n", + "u", + "k", + "m", + "l", + "p", + "v", + "d", + "j", + "g", + "ė", + "b", + "y", + "ų", + "š", + "ž", + "c", + "ą", + "į", + ], + "Slovene": [ + "e", + "a", + "i", + "o", + "n", + "r", + "s", + "l", + "t", + "j", + "v", + "k", + "d", + "p", + "m", + "u", + "z", + "b", + "g", + "h", + "č", + "c", + "š", + "ž", + "f", + "y", + ], + "Slovak": [ + "o", + "a", + "e", + "n", + "i", + "r", + "v", + "t", + "s", + "l", + "k", + "d", + "m", + "p", + "u", + "c", + "h", + "j", + "b", + "z", + "á", + "y", + "ý", + "í", + "č", + "é", + ], + "Hebrew": [ + "י", + "ו", + "ה", + "ל", + "ר", + "ב", + "ת", + "מ", + "א", + "ש", + "נ", + "ע", + "ם", + "ד", + "ק", + "ח", + "פ", + "ס", + "כ", + "ג", + "ט", + "צ", + "ן", + "ז", + "ך", + ], + "Bulgarian": [ + "а", + "и", + "о", + "е", + "н", + "т", + "р", + "с", + "в", + "л", + "к", + "д", + "п", + "м", + "з", + "г", + "я", + "ъ", + "у", + "б", + "ч", + "ц", + "й", + "ж", + "щ", + "х", + ], + "Croatian": [ + "a", + "i", + "o", + "e", + "n", + "r", + "j", + "s", + "t", + "u", + "k", + "l", + "v", + "d", + "m", + "p", + "g", + "z", + "b", + "c", + "č", + "h", + "š", + "ž", + "ć", + "f", + ], + "Hindi": [ + "क", + "र", + "स", + "न", + "त", + "म", + "ह", + "प", + "य", + "ल", + "व", + "ज", + "द", + "ग", + "ब", + "श", + "ट", + "अ", + "ए", + "थ", + "भ", + "ड", + "च", + "ध", + "ष", + "इ", + 
], + "Estonian": [ + "a", + "i", + "e", + "s", + "t", + "l", + "u", + "n", + "o", + "k", + "r", + "d", + "m", + "v", + "g", + "p", + "j", + "h", + "ä", + "b", + "õ", + "ü", + "f", + "c", + "ö", + "y", + ], + "Thai": [ + "า", + "น", + "ร", + "อ", + "ก", + "เ", + "ง", + "ม", + "ย", + "ล", + "ว", + "ด", + "ท", + "ส", + "ต", + "ะ", + "ป", + "บ", + "ค", + "ห", + "แ", + "จ", + "พ", + "ช", + "ข", + "ใ", + ], + "Greek": [ + "α", + "τ", + "ο", + "ι", + "ε", + "ν", + "ρ", + "σ", + "κ", + "η", + "π", + "ς", + "υ", + "μ", + "λ", + "ί", + "ό", + "ά", + "γ", + "έ", + "δ", + "ή", + "ω", + "χ", + "θ", + "ύ", + ], + "Tamil": [ + "க", + "த", + "ப", + "ட", + "ர", + "ம", + "ல", + "ன", + "வ", + "ற", + "ய", + "ள", + "ச", + "ந", + "இ", + "ண", + "அ", + "ஆ", + "ழ", + "ங", + "எ", + "உ", + "ஒ", + "ஸ", + ], + "Kazakh": [ + "а", + "ы", + "е", + "н", + "т", + "р", + "л", + "і", + "д", + "с", + "м", + "қ", + "к", + "о", + "б", + "и", + "у", + "ғ", + "ж", + "ң", + "з", + "ш", + "й", + "п", + "г", + "ө", + ], +} diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/assets/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/assets/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..91f16c9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/assets/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/cd.py b/venv/lib/python3.10/site-packages/charset_normalizer/cd.py new file mode 100644 index 0000000..6e56fe8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer/cd.py @@ -0,0 +1,390 @@ +import importlib +from codecs import IncrementalDecoder +from collections import Counter +from functools import lru_cache +from typing import Counter as TypeCounter, Dict, List, Optional, Tuple + +from .assets import FREQUENCIES +from .constant import KO_NAMES, LANGUAGE_SUPPORTED_COUNT, TOO_SMALL_SEQUENCE, ZH_NAMES +from .md import is_suspiciously_successive_range +from .models import CoherenceMatches +from .utils import ( + is_accentuated, + is_latin, + is_multi_byte_encoding, + is_unicode_range_secondary, + unicode_range, +) + + +def encoding_unicode_range(iana_name: str) -> List[str]: + """ + Return associated unicode ranges in a single byte code page. + """ + if is_multi_byte_encoding(iana_name): + raise IOError("Function not supported on multi-byte code page") + + decoder = importlib.import_module( + "encodings.{}".format(iana_name) + ).IncrementalDecoder + + p: IncrementalDecoder = decoder(errors="ignore") + seen_ranges: Dict[str, int] = {} + character_count: int = 0 + + for i in range(0x40, 0xFF): + chunk: str = p.decode(bytes([i])) + + if chunk: + character_range: Optional[str] = unicode_range(chunk) + + if character_range is None: + continue + + if is_unicode_range_secondary(character_range) is False: + if character_range not in seen_ranges: + seen_ranges[character_range] = 0 + seen_ranges[character_range] += 1 + character_count += 1 + + return sorted( + [ + character_range + for character_range in seen_ranges + if seen_ranges[character_range] / character_count >= 0.15 + ] + ) + + +def unicode_range_languages(primary_range: str) -> List[str]: + """ + Return inferred languages used with a unicode range. 
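These helpers can be probed directly; a hedged sketch (charset_normalizer.cd is an internal module, not the documented public API, so outputs are indicative only):

    from charset_normalizer.cd import encoding_unicode_range, unicode_range_languages

    print(encoding_unicode_range("cp1251"))  # e.g. ['Basic Latin', 'Cyrillic']
    print(unicode_range_languages("Cyrillic"))  # languages whose FREQUENCIES entries fall in that range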
+ """ + languages: List[str] = [] + + for language, characters in FREQUENCIES.items(): + for character in characters: + if unicode_range(character) == primary_range: + languages.append(language) + break + + return languages + + +@lru_cache() +def encoding_languages(iana_name: str) -> List[str]: + """ + Single-byte encoding language association. Some code pages are heavily linked to particular language(s). + This function does the correspondence. + """ + unicode_ranges: List[str] = encoding_unicode_range(iana_name) + primary_range: Optional[str] = None + + for specified_range in unicode_ranges: + if "Latin" not in specified_range: + primary_range = specified_range + break + + if primary_range is None: + return ["Latin Based"] + + return unicode_range_languages(primary_range) + + +@lru_cache() +def mb_encoding_languages(iana_name: str) -> List[str]: + """ + Multi-byte encoding language association. Some code pages are heavily linked to particular language(s). + This function does the correspondence. + """ + if ( + iana_name.startswith("shift_") + or iana_name.startswith("iso2022_jp") + or iana_name.startswith("euc_j") + or iana_name == "cp932" + ): + return ["Japanese"] + if iana_name.startswith("gb") or iana_name in ZH_NAMES: + return ["Chinese"] + if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: + return ["Korean"] + + return [] + + +@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) +def get_target_features(language: str) -> Tuple[bool, bool]: + """ + Determine, for a supported language, whether it contains accented characters and whether it is purely Latin. + """ + target_have_accents: bool = False + target_pure_latin: bool = True + + for character in FREQUENCIES[language]: + if not target_have_accents and is_accentuated(character): + target_have_accents = True + if target_pure_latin and is_latin(character) is False: + target_pure_latin = False + + return target_have_accents, target_pure_latin + + +def alphabet_languages( + characters: List[str], ignore_non_latin: bool = False +) -> List[str]: + """ + Return the languages associated with the given characters. + """ + languages: List[Tuple[str, float]] = [] + + source_have_accents = any(is_accentuated(character) for character in characters) + + for language, language_characters in FREQUENCIES.items(): + target_have_accents, target_pure_latin = get_target_features(language) + + if ignore_non_latin and target_pure_latin is False: + continue + + if target_have_accents is False and source_have_accents: + continue + + character_count: int = len(language_characters) + + character_match_count: int = len( + [c for c in language_characters if c in characters] + ) + + ratio: float = character_match_count / character_count + + if ratio >= 0.2: + languages.append((language, ratio)) + + languages = sorted(languages, key=lambda x: x[1], reverse=True) + + return [compatible_language[0] for compatible_language in languages] + + +def characters_popularity_compare( + language: str, ordered_characters: List[str] +) -> float: + """ + Determine if an ordered character list (by occurrence, from most frequent to rarest) matches a particular language. + The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). + Beware that this function is not strict on the match in order to ease detection. (Meaning a close match is 1.)
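An illustrative call against the English reference ordering (internal API; the observed ordering below is made up):

    from charset_normalizer.cd import characters_popularity_compare

    observed = ["e", "t", "a", "o", "i", "n", "s", "h", "r"]
    score = characters_popularity_compare("English", observed)
    print(round(score, 2))  # closer to 1.0 means a closer fit to English frequencies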
+ """ + if language not in FREQUENCIES: + raise ValueError("{} not available".format(language)) + + character_approved_count: int = 0 + FREQUENCIES_language_set = set(FREQUENCIES[language]) + + ordered_characters_count: int = len(ordered_characters) + target_language_characters_count: int = len(FREQUENCIES[language]) + + large_alphabet: bool = target_language_characters_count > 26 + + for character, character_rank in zip( + ordered_characters, range(0, ordered_characters_count) + ): + if character not in FREQUENCIES_language_set: + continue + + character_rank_in_language: int = FREQUENCIES[language].index(character) + expected_projection_ratio: float = ( + target_language_characters_count / ordered_characters_count + ) + character_rank_projection: int = int(character_rank * expected_projection_ratio) + + if ( + large_alphabet is False + and abs(character_rank_projection - character_rank_in_language) > 4 + ): + continue + + if ( + large_alphabet is True + and abs(character_rank_projection - character_rank_in_language) + < target_language_characters_count / 3 + ): + character_approved_count += 1 + continue + + characters_before_source: List[str] = FREQUENCIES[language][ + 0:character_rank_in_language + ] + characters_after_source: List[str] = FREQUENCIES[language][ + character_rank_in_language: + ] + characters_before: List[str] = ordered_characters[0:character_rank] + characters_after: List[str] = ordered_characters[character_rank:] + + before_match_count: int = len( + set(characters_before) & set(characters_before_source) + ) + + after_match_count: int = len( + set(characters_after) & set(characters_after_source) + ) + + if len(characters_before_source) == 0 and before_match_count <= 4: + character_approved_count += 1 + continue + + if len(characters_after_source) == 0 and after_match_count <= 4: + character_approved_count += 1 + continue + + if ( + before_match_count / len(characters_before_source) >= 0.4 + or after_match_count / len(characters_after_source) >= 0.4 + ): + character_approved_count += 1 + continue + + return character_approved_count / len(ordered_characters) + + +def alpha_unicode_split(decoded_sequence: str) -> List[str]: + """ + Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. + Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; + One containing the latin letters and the other hebrew. + """ + layers: Dict[str, str] = {} + + for character in decoded_sequence: + if character.isalpha() is False: + continue + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + continue + + layer_target_range: Optional[str] = None + + for discovered_range in layers: + if ( + is_suspiciously_successive_range(discovered_range, character_range) + is False + ): + layer_target_range = discovered_range + break + + if layer_target_range is None: + layer_target_range = character_range + + if layer_target_range not in layers: + layers[layer_target_range] = character.lower() + continue + + layers[layer_target_range] += character.lower() + + return list(layers.values()) + + +def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: + """ + This function merge results previously given by the function coherence_ratio. + The return type is the same as coherence_ratio. 
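A small worked example of that merge, using made-up per-chunk ratios:

    from charset_normalizer.cd import merge_coherence_ratios

    chunk_a = [("English", 0.8), ("French", 0.4)]
    chunk_b = [("English", 0.6)]
    print(merge_coherence_ratios([chunk_a, chunk_b]))
    # -> [('English', 0.7), ('French', 0.4)]  (per-language mean, sorted descending)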
+ """ + per_language_ratios: Dict[str, List[float]] = {} + for result in results: + for sub_result in result: + language, ratio = sub_result + if language not in per_language_ratios: + per_language_ratios[language] = [ratio] + continue + per_language_ratios[language].append(ratio) + + merge = [ + ( + language, + round( + sum(per_language_ratios[language]) / len(per_language_ratios[language]), + 4, + ), + ) + for language in per_language_ratios + ] + + return sorted(merge, key=lambda x: x[1], reverse=True) + + +def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: + """ + We shall NOT return "English—" in CoherenceMatches because it is an alternative + of "English". This function only keeps the best match and remove the em-dash in it. + """ + index_results: Dict[str, List[float]] = dict() + + for result in results: + language, ratio = result + no_em_name: str = language.replace("—", "") + + if no_em_name not in index_results: + index_results[no_em_name] = [] + + index_results[no_em_name].append(ratio) + + if any(len(index_results[e]) > 1 for e in index_results): + filtered_results: CoherenceMatches = [] + + for language in index_results: + filtered_results.append((language, max(index_results[language]))) + + return filtered_results + + return results + + +@lru_cache(maxsize=2048) +def coherence_ratio( + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None +) -> CoherenceMatches: + """ + Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. + A layer = Character extraction by alphabets/ranges. + """ + + results: List[Tuple[str, float]] = [] + ignore_non_latin: bool = False + + sufficient_match_count: int = 0 + + lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] + if "Latin Based" in lg_inclusion_list: + ignore_non_latin = True + lg_inclusion_list.remove("Latin Based") + + for layer in alpha_unicode_split(decoded_sequence): + sequence_frequencies: TypeCounter[str] = Counter(layer) + most_common = sequence_frequencies.most_common() + + character_count: int = sum(o for c, o in most_common) + + if character_count <= TOO_SMALL_SEQUENCE: + continue + + popular_character_ordered: List[str] = [c for c, o in most_common] + + for language in lg_inclusion_list or alphabet_languages( + popular_character_ordered, ignore_non_latin + ): + ratio: float = characters_popularity_compare( + language, popular_character_ordered + ) + + if ratio < threshold: + continue + elif ratio >= 0.8: + sufficient_match_count += 1 + + results.append((language, round(ratio, 4))) + + if sufficient_match_count >= 3: + break + + return sorted( + filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True + ) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py b/venv/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..9a8f4a3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/normalizer.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/normalizer.cpython-310.pyc new file 
mode 100644 index 0000000..684bf42 Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/normalizer.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/cli/normalizer.py b/venv/lib/python3.10/site-packages/charset_normalizer/cli/normalizer.py new file mode 100644 index 0000000..f4bcbaa --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer/cli/normalizer.py @@ -0,0 +1,296 @@ +import argparse +import sys +from json import dumps +from os.path import abspath, basename, dirname, join, realpath +from platform import python_version +from typing import List, Optional +from unicodedata import unidata_version + +import charset_normalizer.md as md_module +from charset_normalizer import from_fp +from charset_normalizer.models import CliDetectionResult +from charset_normalizer.version import __version__ + + +def query_yes_no(question: str, default: str = "yes") -> bool: + """Ask a yes/no question via input() and return their answer. + + "question" is a string that is presented to the user. + "default" is the presumed answer if the user just hits . + It must be "yes" (the default), "no" or None (meaning + an answer is required of the user). + + The "answer" return value is True for "yes" or False for "no". + + Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input + """ + valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = input().lower() + if default is not None and choice == "": + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + + +def cli_detect(argv: Optional[List[str]] = None) -> int: + """ + CLI assistant using ARGV and ArgumentParser + :param argv: + :return: 0 if everything is fine, anything else equal trouble + """ + parser = argparse.ArgumentParser( + description="The Real First Universal Charset Detector. " + "Discover originating encoding used on text file. " + "Normalize text to unicode." + ) + + parser.add_argument( + "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + dest="verbose", + help="Display complementary information about file if any. " + "Stdout will contain logs about the detection process.", + ) + parser.add_argument( + "-a", + "--with-alternative", + action="store_true", + default=False, + dest="alternatives", + help="Output complementary possibilities if any. Top-level JSON WILL be a list.", + ) + parser.add_argument( + "-n", + "--normalize", + action="store_true", + default=False, + dest="normalize", + help="Permit to normalize input file. If not set, program does not write anything.", + ) + parser.add_argument( + "-m", + "--minimal", + action="store_true", + default=False, + dest="minimal", + help="Only output the charset detected to STDOUT. 
Disabling JSON output.", + ) + parser.add_argument( + "-r", + "--replace", + action="store_true", + default=False, + dest="replace", + help="Replace file when trying to normalize it instead of creating a new one.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + dest="force", + help="Replace file without asking if you are sure, use this flag with caution.", + ) + parser.add_argument( + "-t", + "--threshold", + action="store", + default=0.2, + type=float, + dest="threshold", + help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.", + ) + parser.add_argument( + "--version", + action="version", + version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format( + __version__, + python_version(), + unidata_version, + "OFF" if md_module.__file__.lower().endswith(".py") else "ON", + ), + help="Show version information and exit.", + ) + + args = parser.parse_args(argv) + + if args.replace is True and args.normalize is False: + print("Use --replace in addition of --normalize only.", file=sys.stderr) + return 1 + + if args.force is True and args.replace is False: + print("Use --force in addition of --replace only.", file=sys.stderr) + return 1 + + if args.threshold < 0.0 or args.threshold > 1.0: + print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) + return 1 + + x_ = [] + + for my_file in args.files: + matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose) + + best_guess = matches.best() + + if best_guess is None: + print( + 'Unable to identify originating encoding for "{}". {}'.format( + my_file.name, + "Maybe try increasing maximum amount of chaos." + if args.threshold < 1.0 + else "", + ), + file=sys.stderr, + ) + x_.append( + CliDetectionResult( + abspath(my_file.name), + None, + [], + [], + "Unknown", + [], + False, + 1.0, + 0.0, + None, + True, + ) + ) + else: + x_.append( + CliDetectionResult( + abspath(my_file.name), + best_guess.encoding, + best_guess.encoding_aliases, + [ + cp + for cp in best_guess.could_be_from_charset + if cp != best_guess.encoding + ], + best_guess.language, + best_guess.alphabets, + best_guess.bom, + best_guess.percent_chaos, + best_guess.percent_coherence, + None, + True, + ) + ) + + if len(matches) > 1 and args.alternatives: + for el in matches: + if el != best_guess: + x_.append( + CliDetectionResult( + abspath(my_file.name), + el.encoding, + el.encoding_aliases, + [ + cp + for cp in el.could_be_from_charset + if cp != el.encoding + ], + el.language, + el.alphabets, + el.bom, + el.percent_chaos, + el.percent_coherence, + None, + False, + ) + ) + + if args.normalize is True: + if best_guess.encoding.startswith("utf") is True: + print( + '"{}" file does not need to be normalized, as it already came from unicode.'.format( + my_file.name + ), + file=sys.stderr, + ) + if my_file.closed is False: + my_file.close() + continue + + dir_path = dirname(realpath(my_file.name)) + file_name = basename(realpath(my_file.name)) + + o_: List[str] = file_name.split(".") + + if args.replace is False: + o_.insert(-1, best_guess.encoding) + if my_file.closed is False: + my_file.close() + elif ( + args.force is False + and query_yes_no( + 'Are you sure to normalize "{}" by replacing it ?'.format( + my_file.name + ), + "no", + ) + is False + ): + if my_file.closed is False: + my_file.close() + continue + + try: + x_[0].unicode_path = join(dir_path, ".".join(o_)) + + with open(x_[0].unicode_path, "w", encoding="utf-8") as fp: + fp.write(str(best_guess)) + 
except IOError as e: + print(str(e), file=sys.stderr) + if my_file.closed is False: + my_file.close() + return 2 + + if my_file.closed is False: + my_file.close() + + if args.minimal is False: + print( + dumps( + [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, + ensure_ascii=True, + indent=4, + ) + ) + else: + for my_file in args.files: + print( + ", ".join( + [ + el.encoding or "undefined" + for el in x_ + if el.path == abspath(my_file.name) + ] + ) + ) + + return 0 + + +if __name__ == "__main__": + cli_detect() diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/constant.py b/venv/lib/python3.10/site-packages/charset_normalizer/constant.py new file mode 100644 index 0000000..3188108 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer/constant.py @@ -0,0 +1,495 @@ +from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE +from encodings.aliases import aliases +from re import IGNORECASE, compile as re_compile +from typing import Dict, List, Set, Union + +from .assets import FREQUENCIES + +# Contain for each eligible encoding a list of/item bytes SIG/BOM +ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = { + "utf_8": BOM_UTF8, + "utf_7": [ + b"\x2b\x2f\x76\x38", + b"\x2b\x2f\x76\x39", + b"\x2b\x2f\x76\x2b", + b"\x2b\x2f\x76\x2f", + b"\x2b\x2f\x76\x38\x2d", + ], + "gb18030": b"\x84\x31\x95\x33", + "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], + "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], +} + +TOO_SMALL_SEQUENCE: int = 32 +TOO_BIG_SEQUENCE: int = int(10e6) + +UTF8_MAXIMAL_ALLOCATION: int = 1112064 + +UNICODE_RANGES_COMBINED: Dict[str, range] = { + "Control character": range(31 + 1), + "Basic Latin": range(32, 127 + 1), + "Latin-1 Supplement": range(128, 255 + 1), + "Latin Extended-A": range(256, 383 + 1), + "Latin Extended-B": range(384, 591 + 1), + "IPA Extensions": range(592, 687 + 1), + "Spacing Modifier Letters": range(688, 767 + 1), + "Combining Diacritical Marks": range(768, 879 + 1), + "Greek and Coptic": range(880, 1023 + 1), + "Cyrillic": range(1024, 1279 + 1), + "Cyrillic Supplement": range(1280, 1327 + 1), + "Armenian": range(1328, 1423 + 1), + "Hebrew": range(1424, 1535 + 1), + "Arabic": range(1536, 1791 + 1), + "Syriac": range(1792, 1871 + 1), + "Arabic Supplement": range(1872, 1919 + 1), + "Thaana": range(1920, 1983 + 1), + "NKo": range(1984, 2047 + 1), + "Samaritan": range(2048, 2111 + 1), + "Mandaic": range(2112, 2143 + 1), + "Syriac Supplement": range(2144, 2159 + 1), + "Arabic Extended-A": range(2208, 2303 + 1), + "Devanagari": range(2304, 2431 + 1), + "Bengali": range(2432, 2559 + 1), + "Gurmukhi": range(2560, 2687 + 1), + "Gujarati": range(2688, 2815 + 1), + "Oriya": range(2816, 2943 + 1), + "Tamil": range(2944, 3071 + 1), + "Telugu": range(3072, 3199 + 1), + "Kannada": range(3200, 3327 + 1), + "Malayalam": range(3328, 3455 + 1), + "Sinhala": range(3456, 3583 + 1), + "Thai": range(3584, 3711 + 1), + "Lao": range(3712, 3839 + 1), + "Tibetan": range(3840, 4095 + 1), + "Myanmar": range(4096, 4255 + 1), + "Georgian": range(4256, 4351 + 1), + "Hangul Jamo": range(4352, 4607 + 1), + "Ethiopic": range(4608, 4991 + 1), + "Ethiopic Supplement": range(4992, 5023 + 1), + "Cherokee": range(5024, 5119 + 1), + "Unified Canadian Aboriginal Syllabics": range(5120, 5759 + 1), + "Ogham": range(5760, 5791 + 1), + "Runic": range(5792, 5887 + 1), + "Tagalog": range(5888, 5919 + 1), + "Hanunoo": range(5920, 5951 + 1), + "Buhid": range(5952, 5983 + 1), + "Tagbanwa": range(5984, 6015 + 1), + "Khmer": range(6016, 6143 + 1), + 
"Mongolian": range(6144, 6319 + 1), + "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6399 + 1), + "Limbu": range(6400, 6479 + 1), + "Tai Le": range(6480, 6527 + 1), + "New Tai Lue": range(6528, 6623 + 1), + "Khmer Symbols": range(6624, 6655 + 1), + "Buginese": range(6656, 6687 + 1), + "Tai Tham": range(6688, 6831 + 1), + "Combining Diacritical Marks Extended": range(6832, 6911 + 1), + "Balinese": range(6912, 7039 + 1), + "Sundanese": range(7040, 7103 + 1), + "Batak": range(7104, 7167 + 1), + "Lepcha": range(7168, 7247 + 1), + "Ol Chiki": range(7248, 7295 + 1), + "Cyrillic Extended C": range(7296, 7311 + 1), + "Sundanese Supplement": range(7360, 7375 + 1), + "Vedic Extensions": range(7376, 7423 + 1), + "Phonetic Extensions": range(7424, 7551 + 1), + "Phonetic Extensions Supplement": range(7552, 7615 + 1), + "Combining Diacritical Marks Supplement": range(7616, 7679 + 1), + "Latin Extended Additional": range(7680, 7935 + 1), + "Greek Extended": range(7936, 8191 + 1), + "General Punctuation": range(8192, 8303 + 1), + "Superscripts and Subscripts": range(8304, 8351 + 1), + "Currency Symbols": range(8352, 8399 + 1), + "Combining Diacritical Marks for Symbols": range(8400, 8447 + 1), + "Letterlike Symbols": range(8448, 8527 + 1), + "Number Forms": range(8528, 8591 + 1), + "Arrows": range(8592, 8703 + 1), + "Mathematical Operators": range(8704, 8959 + 1), + "Miscellaneous Technical": range(8960, 9215 + 1), + "Control Pictures": range(9216, 9279 + 1), + "Optical Character Recognition": range(9280, 9311 + 1), + "Enclosed Alphanumerics": range(9312, 9471 + 1), + "Box Drawing": range(9472, 9599 + 1), + "Block Elements": range(9600, 9631 + 1), + "Geometric Shapes": range(9632, 9727 + 1), + "Miscellaneous Symbols": range(9728, 9983 + 1), + "Dingbats": range(9984, 10175 + 1), + "Miscellaneous Mathematical Symbols-A": range(10176, 10223 + 1), + "Supplemental Arrows-A": range(10224, 10239 + 1), + "Braille Patterns": range(10240, 10495 + 1), + "Supplemental Arrows-B": range(10496, 10623 + 1), + "Miscellaneous Mathematical Symbols-B": range(10624, 10751 + 1), + "Supplemental Mathematical Operators": range(10752, 11007 + 1), + "Miscellaneous Symbols and Arrows": range(11008, 11263 + 1), + "Glagolitic": range(11264, 11359 + 1), + "Latin Extended-C": range(11360, 11391 + 1), + "Coptic": range(11392, 11519 + 1), + "Georgian Supplement": range(11520, 11567 + 1), + "Tifinagh": range(11568, 11647 + 1), + "Ethiopic Extended": range(11648, 11743 + 1), + "Cyrillic Extended-A": range(11744, 11775 + 1), + "Supplemental Punctuation": range(11776, 11903 + 1), + "CJK Radicals Supplement": range(11904, 12031 + 1), + "Kangxi Radicals": range(12032, 12255 + 1), + "Ideographic Description Characters": range(12272, 12287 + 1), + "CJK Symbols and Punctuation": range(12288, 12351 + 1), + "Hiragana": range(12352, 12447 + 1), + "Katakana": range(12448, 12543 + 1), + "Bopomofo": range(12544, 12591 + 1), + "Hangul Compatibility Jamo": range(12592, 12687 + 1), + "Kanbun": range(12688, 12703 + 1), + "Bopomofo Extended": range(12704, 12735 + 1), + "CJK Strokes": range(12736, 12783 + 1), + "Katakana Phonetic Extensions": range(12784, 12799 + 1), + "Enclosed CJK Letters and Months": range(12800, 13055 + 1), + "CJK Compatibility": range(13056, 13311 + 1), + "CJK Unified Ideographs Extension A": range(13312, 19903 + 1), + "Yijing Hexagram Symbols": range(19904, 19967 + 1), + "CJK Unified Ideographs": range(19968, 40959 + 1), + "Yi Syllables": range(40960, 42127 + 1), + "Yi Radicals": range(42128, 42191 + 1), + "Lisu": 
range(42192, 42239 + 1), + "Vai": range(42240, 42559 + 1), + "Cyrillic Extended-B": range(42560, 42655 + 1), + "Bamum": range(42656, 42751 + 1), + "Modifier Tone Letters": range(42752, 42783 + 1), + "Latin Extended-D": range(42784, 43007 + 1), + "Syloti Nagri": range(43008, 43055 + 1), + "Common Indic Number Forms": range(43056, 43071 + 1), + "Phags-pa": range(43072, 43135 + 1), + "Saurashtra": range(43136, 43231 + 1), + "Devanagari Extended": range(43232, 43263 + 1), + "Kayah Li": range(43264, 43311 + 1), + "Rejang": range(43312, 43359 + 1), + "Hangul Jamo Extended-A": range(43360, 43391 + 1), + "Javanese": range(43392, 43487 + 1), + "Myanmar Extended-B": range(43488, 43519 + 1), + "Cham": range(43520, 43615 + 1), + "Myanmar Extended-A": range(43616, 43647 + 1), + "Tai Viet": range(43648, 43743 + 1), + "Meetei Mayek Extensions": range(43744, 43775 + 1), + "Ethiopic Extended-A": range(43776, 43823 + 1), + "Latin Extended-E": range(43824, 43887 + 1), + "Cherokee Supplement": range(43888, 43967 + 1), + "Meetei Mayek": range(43968, 44031 + 1), + "Hangul Syllables": range(44032, 55215 + 1), + "Hangul Jamo Extended-B": range(55216, 55295 + 1), + "High Surrogates": range(55296, 56191 + 1), + "High Private Use Surrogates": range(56192, 56319 + 1), + "Low Surrogates": range(56320, 57343 + 1), + "Private Use Area": range(57344, 63743 + 1), + "CJK Compatibility Ideographs": range(63744, 64255 + 1), + "Alphabetic Presentation Forms": range(64256, 64335 + 1), + "Arabic Presentation Forms-A": range(64336, 65023 + 1), + "Variation Selectors": range(65024, 65039 + 1), + "Vertical Forms": range(65040, 65055 + 1), + "Combining Half Marks": range(65056, 65071 + 1), + "CJK Compatibility Forms": range(65072, 65103 + 1), + "Small Form Variants": range(65104, 65135 + 1), + "Arabic Presentation Forms-B": range(65136, 65279 + 1), + "Halfwidth and Fullwidth Forms": range(65280, 65519 + 1), + "Specials": range(65520, 65535 + 1), + "Linear B Syllabary": range(65536, 65663 + 1), + "Linear B Ideograms": range(65664, 65791 + 1), + "Aegean Numbers": range(65792, 65855 + 1), + "Ancient Greek Numbers": range(65856, 65935 + 1), + "Ancient Symbols": range(65936, 65999 + 1), + "Phaistos Disc": range(66000, 66047 + 1), + "Lycian": range(66176, 66207 + 1), + "Carian": range(66208, 66271 + 1), + "Coptic Epact Numbers": range(66272, 66303 + 1), + "Old Italic": range(66304, 66351 + 1), + "Gothic": range(66352, 66383 + 1), + "Old Permic": range(66384, 66431 + 1), + "Ugaritic": range(66432, 66463 + 1), + "Old Persian": range(66464, 66527 + 1), + "Deseret": range(66560, 66639 + 1), + "Shavian": range(66640, 66687 + 1), + "Osmanya": range(66688, 66735 + 1), + "Osage": range(66736, 66815 + 1), + "Elbasan": range(66816, 66863 + 1), + "Caucasian Albanian": range(66864, 66927 + 1), + "Linear A": range(67072, 67455 + 1), + "Cypriot Syllabary": range(67584, 67647 + 1), + "Imperial Aramaic": range(67648, 67679 + 1), + "Palmyrene": range(67680, 67711 + 1), + "Nabataean": range(67712, 67759 + 1), + "Hatran": range(67808, 67839 + 1), + "Phoenician": range(67840, 67871 + 1), + "Lydian": range(67872, 67903 + 1), + "Meroitic Hieroglyphs": range(67968, 67999 + 1), + "Meroitic Cursive": range(68000, 68095 + 1), + "Kharoshthi": range(68096, 68191 + 1), + "Old South Arabian": range(68192, 68223 + 1), + "Old North Arabian": range(68224, 68255 + 1), + "Manichaean": range(68288, 68351 + 1), + "Avestan": range(68352, 68415 + 1), + "Inscriptional Parthian": range(68416, 68447 + 1), + "Inscriptional Pahlavi": range(68448, 68479 + 1), + "Psalter Pahlavi": 
range(68480, 68527 + 1), + "Old Turkic": range(68608, 68687 + 1), + "Old Hungarian": range(68736, 68863 + 1), + "Rumi Numeral Symbols": range(69216, 69247 + 1), + "Brahmi": range(69632, 69759 + 1), + "Kaithi": range(69760, 69839 + 1), + "Sora Sompeng": range(69840, 69887 + 1), + "Chakma": range(69888, 69967 + 1), + "Mahajani": range(69968, 70015 + 1), + "Sharada": range(70016, 70111 + 1), + "Sinhala Archaic Numbers": range(70112, 70143 + 1), + "Khojki": range(70144, 70223 + 1), + "Multani": range(70272, 70319 + 1), + "Khudawadi": range(70320, 70399 + 1), + "Grantha": range(70400, 70527 + 1), + "Newa": range(70656, 70783 + 1), + "Tirhuta": range(70784, 70879 + 1), + "Siddham": range(71040, 71167 + 1), + "Modi": range(71168, 71263 + 1), + "Mongolian Supplement": range(71264, 71295 + 1), + "Takri": range(71296, 71375 + 1), + "Ahom": range(71424, 71487 + 1), + "Warang Citi": range(71840, 71935 + 1), + "Zanabazar Square": range(72192, 72271 + 1), + "Soyombo": range(72272, 72367 + 1), + "Pau Cin Hau": range(72384, 72447 + 1), + "Bhaiksuki": range(72704, 72815 + 1), + "Marchen": range(72816, 72895 + 1), + "Masaram Gondi": range(72960, 73055 + 1), + "Cuneiform": range(73728, 74751 + 1), + "Cuneiform Numbers and Punctuation": range(74752, 74879 + 1), + "Early Dynastic Cuneiform": range(74880, 75087 + 1), + "Egyptian Hieroglyphs": range(77824, 78895 + 1), + "Anatolian Hieroglyphs": range(82944, 83583 + 1), + "Bamum Supplement": range(92160, 92735 + 1), + "Mro": range(92736, 92783 + 1), + "Bassa Vah": range(92880, 92927 + 1), + "Pahawh Hmong": range(92928, 93071 + 1), + "Miao": range(93952, 94111 + 1), + "Ideographic Symbols and Punctuation": range(94176, 94207 + 1), + "Tangut": range(94208, 100351 + 1), + "Tangut Components": range(100352, 101119 + 1), + "Kana Supplement": range(110592, 110847 + 1), + "Kana Extended-A": range(110848, 110895 + 1), + "Nushu": range(110960, 111359 + 1), + "Duployan": range(113664, 113823 + 1), + "Shorthand Format Controls": range(113824, 113839 + 1), + "Byzantine Musical Symbols": range(118784, 119039 + 1), + "Musical Symbols": range(119040, 119295 + 1), + "Ancient Greek Musical Notation": range(119296, 119375 + 1), + "Tai Xuan Jing Symbols": range(119552, 119647 + 1), + "Counting Rod Numerals": range(119648, 119679 + 1), + "Mathematical Alphanumeric Symbols": range(119808, 120831 + 1), + "Sutton SignWriting": range(120832, 121519 + 1), + "Glagolitic Supplement": range(122880, 122927 + 1), + "Mende Kikakui": range(124928, 125151 + 1), + "Adlam": range(125184, 125279 + 1), + "Arabic Mathematical Alphabetic Symbols": range(126464, 126719 + 1), + "Mahjong Tiles": range(126976, 127023 + 1), + "Domino Tiles": range(127024, 127135 + 1), + "Playing Cards": range(127136, 127231 + 1), + "Enclosed Alphanumeric Supplement": range(127232, 127487 + 1), + "Enclosed Ideographic Supplement": range(127488, 127743 + 1), + "Miscellaneous Symbols and Pictographs": range(127744, 128511 + 1), + "Emoticons range(Emoji)": range(128512, 128591 + 1), + "Ornamental Dingbats": range(128592, 128639 + 1), + "Transport and Map Symbols": range(128640, 128767 + 1), + "Alchemical Symbols": range(128768, 128895 + 1), + "Geometric Shapes Extended": range(128896, 129023 + 1), + "Supplemental Arrows-C": range(129024, 129279 + 1), + "Supplemental Symbols and Pictographs": range(129280, 129535 + 1), + "CJK Unified Ideographs Extension B": range(131072, 173791 + 1), + "CJK Unified Ideographs Extension C": range(173824, 177983 + 1), + "CJK Unified Ideographs Extension D": range(177984, 178207 + 1), + "CJK 
Unified Ideographs Extension E": range(178208, 183983 + 1), + "CJK Unified Ideographs Extension F": range(183984, 191471 + 1), + "CJK Compatibility Ideographs Supplement": range(194560, 195103 + 1), + "Tags": range(917504, 917631 + 1), + "Variation Selectors Supplement": range(917760, 917999 + 1), +} + + +UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [ + "Supplement", + "Extended", + "Extensions", + "Modifier", + "Marks", + "Punctuation", + "Symbols", + "Forms", + "Operators", + "Miscellaneous", + "Drawing", + "Block", + "Shapes", + "Supplemental", + "Tags", +] + +RE_POSSIBLE_ENCODING_INDICATION = re_compile( + r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", + IGNORECASE, +) + +IANA_SUPPORTED: List[str] = sorted( + filter( + lambda x: x.endswith("_codec") is False + and x not in {"rot_13", "tactis", "mbcs"}, + list(set(aliases.values())), + ) +) + +IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) + +# pre-computed code page that are similar using the function cp_similarity. +IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = { + "cp037": ["cp1026", "cp1140", "cp273", "cp500"], + "cp1026": ["cp037", "cp1140", "cp273", "cp500"], + "cp1125": ["cp866"], + "cp1140": ["cp037", "cp1026", "cp273", "cp500"], + "cp1250": ["iso8859_2"], + "cp1251": ["kz1048", "ptcp154"], + "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1253": ["iso8859_7"], + "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1257": ["iso8859_13"], + "cp273": ["cp037", "cp1026", "cp1140", "cp500"], + "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], + "cp500": ["cp037", "cp1026", "cp1140", "cp273"], + "cp850": ["cp437", "cp857", "cp858", "cp865"], + "cp857": ["cp850", "cp858", "cp865"], + "cp858": ["cp437", "cp850", "cp857", "cp865"], + "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], + "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], + "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], + "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], + "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], + "cp866": ["cp1125"], + "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], + "iso8859_11": ["tis_620"], + "iso8859_13": ["cp1257"], + "iso8859_14": [ + "iso8859_10", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_15": [ + "cp1252", + "cp1254", + "iso8859_10", + "iso8859_14", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_16": [ + "iso8859_14", + "iso8859_15", + "iso8859_2", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], + "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], + "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], + "iso8859_7": ["cp1253"], + "iso8859_9": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "latin_1", + ], + "kz1048": ["cp1251", "ptcp154"], + "latin_1": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "iso8859_9", + ], + "mac_iceland": ["mac_roman", "mac_turkish"], + "mac_roman": ["mac_iceland", "mac_turkish"], + "mac_turkish": ["mac_iceland", "mac_roman"], + "ptcp154": ["cp1251", "kz1048"], + "tis_620": ["iso8859_11"], +} + + +CHARDET_CORRESPONDENCE: Dict[str, str] = { + "iso2022_kr": "ISO-2022-KR", + "iso2022_jp": 
"ISO-2022-JP", + "euc_kr": "EUC-KR", + "tis_620": "TIS-620", + "utf_32": "UTF-32", + "euc_jp": "EUC-JP", + "koi8_r": "KOI8-R", + "iso8859_1": "ISO-8859-1", + "iso8859_2": "ISO-8859-2", + "iso8859_5": "ISO-8859-5", + "iso8859_6": "ISO-8859-6", + "iso8859_7": "ISO-8859-7", + "iso8859_8": "ISO-8859-8", + "utf_16": "UTF-16", + "cp855": "IBM855", + "mac_cyrillic": "MacCyrillic", + "gb2312": "GB2312", + "gb18030": "GB18030", + "cp932": "CP932", + "cp866": "IBM866", + "utf_8": "utf-8", + "utf_8_sig": "UTF-8-SIG", + "shift_jis": "SHIFT_JIS", + "big5": "Big5", + "cp1250": "windows-1250", + "cp1251": "windows-1251", + "cp1252": "Windows-1252", + "cp1253": "windows-1253", + "cp1255": "windows-1255", + "cp1256": "windows-1256", + "cp1254": "Windows-1254", + "cp949": "CP949", +} + + +COMMON_SAFE_ASCII_CHARACTERS: Set[str] = { + "<", + ">", + "=", + ":", + "/", + "&", + ";", + "{", + "}", + "[", + "]", + ",", + "|", + '"', + "-", +} + + +KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"} +ZH_NAMES: Set[str] = {"big5", "cp950", "big5hkscs", "hz"} + +LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) + +# Logging LEVEL below DEBUG +TRACE: int = 5 diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/legacy.py b/venv/lib/python3.10/site-packages/charset_normalizer/legacy.py new file mode 100644 index 0000000..43aad21 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer/legacy.py @@ -0,0 +1,54 @@ +from typing import Any, Dict, Optional, Union +from warnings import warn + +from .api import from_bytes +from .constant import CHARDET_CORRESPONDENCE + + +def detect( + byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any +) -> Dict[str, Optional[Union[str, float]]]: + """ + chardet legacy method + Detect the encoding of the given byte string. It should be mostly backward-compatible. + Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) + This function is deprecated and should be used to migrate your project easily, consult the documentation for + further information. Not planned for removal. + + :param byte_str: The byte sequence to examine. + :param should_rename_legacy: Should we rename legacy encodings + to their more modern equivalents? + """ + if len(kwargs): + warn( + f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" + ) + + if not isinstance(byte_str, (bytearray, bytes)): + raise TypeError( # pragma: nocover + "Expected object of type bytes or bytearray, got: " + "{0}".format(type(byte_str)) + ) + + if isinstance(byte_str, bytearray): + byte_str = bytes(byte_str) + + r = from_bytes(byte_str).best() + + encoding = r.encoding if r is not None else None + language = r.language if r is not None and r.language != "Unknown" else "" + confidence = 1.0 - r.chaos if r is not None else None + + # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process + # but chardet does return 'utf-8-sig' and it is a valid codec name. 
+ if r is not None and encoding == "utf_8" and r.bom: + encoding += "_sig" + + if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: + encoding = CHARDET_CORRESPONDENCE[encoding] + + return { + "encoding": encoding, + "language": language, + "confidence": confidence, + } diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/md.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/charset_normalizer/md.cpython-310-x86_64-linux-gnu.so new file mode 100755 index 0000000..0e3becc Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/md.cpython-310-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/md.py b/venv/lib/python3.10/site-packages/charset_normalizer/md.py new file mode 100644 index 0000000..56e9321 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer/md.py @@ -0,0 +1,571 @@ +from functools import lru_cache +from logging import getLogger +from typing import List, Optional + +from .constant import ( + COMMON_SAFE_ASCII_CHARACTERS, + TRACE, + UNICODE_SECONDARY_RANGE_KEYWORD, +) +from .utils import ( + is_accentuated, + is_ascii, + is_case_variable, + is_cjk, + is_emoticon, + is_hangul, + is_hiragana, + is_katakana, + is_latin, + is_punctuation, + is_separator, + is_symbol, + is_thai, + is_unprintable, + remove_accent, + unicode_range, +) + + +class MessDetectorPlugin: + """ + Base abstract class used for mess detection plugins. + All detectors MUST extend and implement given methods. + """ + + def eligible(self, character: str) -> bool: + """ + Determine if the given character should be fed in. + """ + raise NotImplementedError # pragma: nocover + + def feed(self, character: str) -> None: + """ + The main routine to be executed upon character. + Insert the logic in which the text would be considered chaotic. + """ + raise NotImplementedError # pragma: nocover + + def reset(self) -> None: # pragma: no cover + """ + Permit to reset the plugin to its initial state. + """ + raise NotImplementedError + + @property + def ratio(self) -> float: + """ + Compute the chaos ratio based on what your feed() has seen. + Must NOT be lower than 0.0; there is no upper restriction.
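For callers migrating off chardet, detect() above is intended as a drop-in replacement. A minimal usage sketch; the keys come straight from the return statement above, while the concrete confidence value will vary:

from charset_normalizer import detect

result = detect("naïve café".encode("cp1252"))
# a plain dict: {"encoding": ..., "language": ..., "confidence": ...}
print(result["encoding"], result["confidence"])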
+ """ + raise NotImplementedError # pragma: nocover + + +class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._punctuation_count: int = 0 + self._symbol_count: int = 0 + self._character_count: int = 0 + + self._last_printable_char: Optional[str] = None + self._frenzy_symbol_in_word: bool = False + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character != self._last_printable_char + and character not in COMMON_SAFE_ASCII_CHARACTERS + ): + if is_punctuation(character): + self._punctuation_count += 1 + elif ( + character.isdigit() is False + and is_symbol(character) + and is_emoticon(character) is False + ): + self._symbol_count += 2 + + self._last_printable_char = character + + def reset(self) -> None: # pragma: no cover + self._punctuation_count = 0 + self._character_count = 0 + self._symbol_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + ratio_of_punctuation: float = ( + self._punctuation_count + self._symbol_count + ) / self._character_count + + return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 + + +class TooManyAccentuatedPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._accentuated_count: int = 0 + + def eligible(self, character: str) -> bool: + return character.isalpha() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_accentuated(character): + self._accentuated_count += 1 + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._accentuated_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0 or self._character_count < 8: + return 0.0 + ratio_of_accentuation: float = self._accentuated_count / self._character_count + return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 + + +class UnprintablePlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._unprintable_count: int = 0 + self._character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if is_unprintable(character): + self._unprintable_count += 1 + self._character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._unprintable_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._unprintable_count * 8) / self._character_count + + +class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._successive_count: int = 0 + self._character_count: int = 0 + + self._last_latin_character: Optional[str] = None + + def eligible(self, character: str) -> bool: + return character.isalpha() and is_latin(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + if ( + self._last_latin_character is not None + and is_accentuated(character) + and is_accentuated(self._last_latin_character) + ): + if character.isupper() and self._last_latin_character.isupper(): + self._successive_count += 1 + # Worse if its the same char duplicated with different accent. 
+ if remove_accent(character) == remove_accent(self._last_latin_character): + self._successive_count += 1 + self._last_latin_character = character + + def reset(self) -> None: # pragma: no cover + self._successive_count = 0 + self._character_count = 0 + self._last_latin_character = None + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._successive_count * 2) / self._character_count + + +class SuspiciousRange(MessDetectorPlugin): + def __init__(self) -> None: + self._suspicious_successive_range_count: int = 0 + self._character_count: int = 0 + self._last_printable_seen: Optional[str] = None + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character.isspace() + or is_punctuation(character) + or character in COMMON_SAFE_ASCII_CHARACTERS + ): + self._last_printable_seen = None + return + + if self._last_printable_seen is None: + self._last_printable_seen = character + return + + unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen) + unicode_range_b: Optional[str] = unicode_range(character) + + if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): + self._suspicious_successive_range_count += 1 + + self._last_printable_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._suspicious_successive_range_count = 0 + self._last_printable_seen = None + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + ratio_of_suspicious_range_usage: float = ( + self._suspicious_successive_range_count * 2 + ) / self._character_count + + if ratio_of_suspicious_range_usage < 0.1: + return 0.0 + + return ratio_of_suspicious_range_usage + + +class SuperWeirdWordPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._word_count: int = 0 + self._bad_word_count: int = 0 + self._foreign_long_count: int = 0 + + self._is_current_word_bad: bool = False + self._foreign_long_watch: bool = False + + self._character_count: int = 0 + self._bad_character_count: int = 0 + + self._buffer: str = "" + self._buffer_accent_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character.isalpha(): + self._buffer += character + if is_accentuated(character): + self._buffer_accent_count += 1 + if ( + self._foreign_long_watch is False + and (is_latin(character) is False or is_accentuated(character)) + and is_cjk(character) is False + and is_hangul(character) is False + and is_katakana(character) is False + and is_hiragana(character) is False + and is_thai(character) is False + ): + self._foreign_long_watch = True + return + if not self._buffer: + return + if ( + character.isspace() or is_punctuation(character) or is_separator(character) + ) and self._buffer: + self._word_count += 1 + buffer_length: int = len(self._buffer) + + self._character_count += buffer_length + + if buffer_length >= 4: + if self._buffer_accent_count / buffer_length > 0.34: + self._is_current_word_bad = True + # Word/Buffer ending with a upper case accentuated letter are so rare, + # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
+ if is_accentuated(self._buffer[-1]) and self._buffer[-1].isupper(): + self._foreign_long_count += 1 + self._is_current_word_bad = True + if buffer_length >= 24 and self._foreign_long_watch: + self._foreign_long_count += 1 + self._is_current_word_bad = True + + if self._is_current_word_bad: + self._bad_word_count += 1 + self._bad_character_count += len(self._buffer) + self._is_current_word_bad = False + + self._foreign_long_watch = False + self._buffer = "" + self._buffer_accent_count = 0 + elif ( + character not in {"<", ">", "-", "=", "~", "|", "_"} + and character.isdigit() is False + and is_symbol(character) + ): + self._is_current_word_bad = True + self._buffer += character + + def reset(self) -> None: # pragma: no cover + self._buffer = "" + self._is_current_word_bad = False + self._foreign_long_watch = False + self._bad_word_count = 0 + self._word_count = 0 + self._character_count = 0 + self._bad_character_count = 0 + self._foreign_long_count = 0 + + @property + def ratio(self) -> float: + if self._word_count <= 10 and self._foreign_long_count == 0: + return 0.0 + + return self._bad_character_count / self._character_count + + +class CjkInvalidStopPlugin(MessDetectorPlugin): + """ + GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and + can be easily detected. Searching for the overuse of '丅' and '丄'. + """ + + def __init__(self) -> None: + self._wrong_stop_count: int = 0 + self._cjk_character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character in {"丅", "丄"}: + self._wrong_stop_count += 1 + return + if is_cjk(character): + self._cjk_character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._wrong_stop_count = 0 + self._cjk_character_count = 0 + + @property + def ratio(self) -> float: + if self._cjk_character_count < 16: + return 0.0 + return self._wrong_stop_count / self._cjk_character_count + + +class ArchaicUpperLowerPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._buf: bool = False + + self._character_count_since_last_sep: int = 0 + + self._successive_upper_lower_count: int = 0 + self._successive_upper_lower_count_final: int = 0 + + self._character_count: int = 0 + + self._last_alpha_seen: Optional[str] = None + self._current_ascii_only: bool = True + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + is_concerned = character.isalpha() and is_case_variable(character) + chunk_sep = is_concerned is False + + if chunk_sep and self._character_count_since_last_sep > 0: + if ( + self._character_count_since_last_sep <= 64 + and character.isdigit() is False + and self._current_ascii_only is False + ): + self._successive_upper_lower_count_final += ( + self._successive_upper_lower_count + ) + + self._successive_upper_lower_count = 0 + self._character_count_since_last_sep = 0 + self._last_alpha_seen = None + self._buf = False + self._character_count += 1 + self._current_ascii_only = True + + return + + if self._current_ascii_only is True and is_ascii(character) is False: + self._current_ascii_only = False + + if self._last_alpha_seen is not None: + if (character.isupper() and self._last_alpha_seen.islower()) or ( + character.islower() and self._last_alpha_seen.isupper() + ): + if self._buf is True: + self._successive_upper_lower_count += 2 + self._buf = False + else: + self._buf = True + else: + self._buf = False + + self._character_count += 1 + 
self._character_count_since_last_sep += 1 + self._last_alpha_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._character_count_since_last_sep = 0 + self._successive_upper_lower_count = 0 + self._successive_upper_lower_count_final = 0 + self._last_alpha_seen = None + self._buf = False + self._current_ascii_only = True + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return self._successive_upper_lower_count_final / self._character_count + + +@lru_cache(maxsize=1024) +def is_suspiciously_successive_range( + unicode_range_a: Optional[str], unicode_range_b: Optional[str] +) -> bool: + """ + Determine if two Unicode ranges seen next to each other can be considered as suspicious. + """ + if unicode_range_a is None or unicode_range_b is None: + return True + + if unicode_range_a == unicode_range_b: + return False + + if "Latin" in unicode_range_a and "Latin" in unicode_range_b: + return False + + if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b: + return False + + # Latin characters can be accompanied with a combining diacritical mark + # eg. Vietnamese. + if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and ( + "Combining" in unicode_range_a or "Combining" in unicode_range_b + ): + return False + + keywords_range_a, keywords_range_b = unicode_range_a.split( + " " + ), unicode_range_b.split(" ") + + for el in keywords_range_a: + if el in UNICODE_SECONDARY_RANGE_KEYWORD: + continue + if el in keywords_range_b: + return False + + # Japanese Exception + range_a_jp_chars, range_b_jp_chars = ( + unicode_range_a + in ( + "Hiragana", + "Katakana", + ), + unicode_range_b in ("Hiragana", "Katakana"), + ) + if (range_a_jp_chars or range_b_jp_chars) and ( + "CJK" in unicode_range_a or "CJK" in unicode_range_b + ): + return False + if range_a_jp_chars and range_b_jp_chars: + return False + + if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b: + if "CJK" in unicode_range_a or "CJK" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + # Chinese/Japanese use dedicated range for punctuation and/or separators. + if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or ( + unicode_range_a in ["Katakana", "Hiragana"] + and unicode_range_b in ["Katakana", "Hiragana"] + ): + if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b: + return False + if "Forms" in unicode_range_a or "Forms" in unicode_range_b: + return False + + return True + + +@lru_cache(maxsize=2048) +def mess_ratio( + decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False +) -> float: + """ + Compute a mess ratio given a decoded bytes sequence. The maximum threshold stops the computation earlier.
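Tracing the branches above on a few concrete range pairs; each result follows directly from the rules shown:

is_suspiciously_successive_range("Basic Latin", "Cyrillic")          # True: unrelated scripts side by side
is_suspiciously_successive_range("Basic Latin", "Latin Extended-A")  # False: shared "Latin" keyword
is_suspiciously_successive_range("Hiragana", "Katakana")             # False: Japanese exception
is_suspiciously_successive_range(None, "Basic Latin")                # True: unknown range is suspicious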
+ """ + + detectors: List[MessDetectorPlugin] = [ + md_class() for md_class in MessDetectorPlugin.__subclasses__() + ] + + length: int = len(decoded_sequence) + 1 + + mean_mess_ratio: float = 0.0 + + if length < 512: + intermediary_mean_mess_ratio_calc: int = 32 + elif length <= 1024: + intermediary_mean_mess_ratio_calc = 64 + else: + intermediary_mean_mess_ratio_calc = 128 + + for character, index in zip(decoded_sequence + "\n", range(length)): + for detector in detectors: + if detector.eligible(character): + detector.feed(character) + + if ( + index > 0 and index % intermediary_mean_mess_ratio_calc == 0 + ) or index == length - 1: + mean_mess_ratio = sum(dt.ratio for dt in detectors) + + if mean_mess_ratio >= maximum_threshold: + break + + if debug: + logger = getLogger("charset_normalizer") + + logger.log( + TRACE, + "Mess-detector extended-analysis start. " + f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} " + f"maximum_threshold={maximum_threshold}", + ) + + if len(decoded_sequence) > 16: + logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") + logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") + + for dt in detectors: # pragma: nocover + logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") + + return round(mean_mess_ratio, 3) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so new file mode 100755 index 0000000..441d8d7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/models.py b/venv/lib/python3.10/site-packages/charset_normalizer/models.py new file mode 100644 index 0000000..7f8ca38 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer/models.py @@ -0,0 +1,337 @@ +from encodings.aliases import aliases +from hashlib import sha256 +from json import dumps +from typing import Any, Dict, Iterator, List, Optional, Tuple, Union + +from .constant import TOO_BIG_SEQUENCE +from .utils import iana_name, is_multi_byte_encoding, unicode_range + + +class CharsetMatch: + def __init__( + self, + payload: bytes, + guessed_encoding: str, + mean_mess_ratio: float, + has_sig_or_bom: bool, + languages: "CoherenceMatches", + decoded_payload: Optional[str] = None, + ): + self._payload: bytes = payload + + self._encoding: str = guessed_encoding + self._mean_mess_ratio: float = mean_mess_ratio + self._languages: CoherenceMatches = languages + self._has_sig_or_bom: bool = has_sig_or_bom + self._unicode_ranges: Optional[List[str]] = None + + self._leaves: List[CharsetMatch] = [] + self._mean_coherence_ratio: float = 0.0 + + self._output_payload: Optional[bytes] = None + self._output_encoding: Optional[str] = None + + self._string: Optional[str] = decoded_payload + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CharsetMatch): + raise TypeError( + "__eq__ cannot be invoked on {} and {}.".format( + str(other.__class__), str(self.__class__) + ) + ) + return self.encoding == other.encoding and self.fingerprint == other.fingerprint + + def __lt__(self, other: object) -> bool: + """ + Implemented to make sorted available upon CharsetMatches items. 
+ """ + if not isinstance(other, CharsetMatch): + raise ValueError + + chaos_difference: float = abs(self.chaos - other.chaos) + coherence_difference: float = abs(self.coherence - other.coherence) + + # Below 1% difference --> Use Coherence + if chaos_difference < 0.01 and coherence_difference > 0.02: + # When having a tough decision, use the result that decoded as many multi-byte as possible. + if chaos_difference == 0.0 and self.coherence == other.coherence: + return self.multi_byte_usage > other.multi_byte_usage + return self.coherence > other.coherence + + return self.chaos < other.chaos + + @property + def multi_byte_usage(self) -> float: + return 1.0 - len(str(self)) / len(self.raw) + + def __str__(self) -> str: + # Lazy Str Loading + if self._string is None: + self._string = str(self._payload, self._encoding, "strict") + return self._string + + def __repr__(self) -> str: + return "".format(self.encoding, self.fingerprint) + + def add_submatch(self, other: "CharsetMatch") -> None: + if not isinstance(other, CharsetMatch) or other == self: + raise ValueError( + "Unable to add instance <{}> as a submatch of a CharsetMatch".format( + other.__class__ + ) + ) + + other._string = None # Unload RAM usage; dirty trick. + self._leaves.append(other) + + @property + def encoding(self) -> str: + return self._encoding + + @property + def encoding_aliases(self) -> List[str]: + """ + Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. + """ + also_known_as: List[str] = [] + for u, p in aliases.items(): + if self.encoding == u: + also_known_as.append(p) + elif self.encoding == p: + also_known_as.append(u) + return also_known_as + + @property + def bom(self) -> bool: + return self._has_sig_or_bom + + @property + def byte_order_mark(self) -> bool: + return self._has_sig_or_bom + + @property + def languages(self) -> List[str]: + """ + Return the complete list of possible languages found in decoded sequence. + Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. + """ + return [e[0] for e in self._languages] + + @property + def language(self) -> str: + """ + Most probable language found in decoded sequence. If none were detected or inferred, the property will return + "Unknown". + """ + if not self._languages: + # Trying to infer the language based on the given encoding + # Its either English or we should not pronounce ourselves in certain cases. + if "ascii" in self.could_be_from_charset: + return "English" + + # doing it there to avoid circular import + from charset_normalizer.cd import encoding_languages, mb_encoding_languages + + languages = ( + mb_encoding_languages(self.encoding) + if is_multi_byte_encoding(self.encoding) + else encoding_languages(self.encoding) + ) + + if len(languages) == 0 or "Latin Based" in languages: + return "Unknown" + + return languages[0] + + return self._languages[0][0] + + @property + def chaos(self) -> float: + return self._mean_mess_ratio + + @property + def coherence(self) -> float: + if not self._languages: + return 0.0 + return self._languages[0][1] + + @property + def percent_chaos(self) -> float: + return round(self.chaos * 100, ndigits=3) + + @property + def percent_coherence(self) -> float: + return round(self.coherence * 100, ndigits=3) + + @property + def raw(self) -> bytes: + """ + Original untouched bytes. 
+ """ + return self._payload + + @property + def submatch(self) -> List["CharsetMatch"]: + return self._leaves + + @property + def has_submatch(self) -> bool: + return len(self._leaves) > 0 + + @property + def alphabets(self) -> List[str]: + if self._unicode_ranges is not None: + return self._unicode_ranges + # list detected ranges + detected_ranges: List[Optional[str]] = [ + unicode_range(char) for char in str(self) + ] + # filter and sort + self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) + return self._unicode_ranges + + @property + def could_be_from_charset(self) -> List[str]: + """ + The complete list of encoding that output the exact SAME str result and therefore could be the originating + encoding. + This list does include the encoding available in property 'encoding'. + """ + return [self._encoding] + [m.encoding for m in self._leaves] + + def output(self, encoding: str = "utf_8") -> bytes: + """ + Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. + Any errors will be simply ignored by the encoder NOT replaced. + """ + if self._output_encoding is None or self._output_encoding != encoding: + self._output_encoding = encoding + self._output_payload = str(self).encode(encoding, "replace") + + return self._output_payload # type: ignore + + @property + def fingerprint(self) -> str: + """ + Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. + """ + return sha256(self.output()).hexdigest() + + +class CharsetMatches: + """ + Container with every CharsetMatch items ordered by default from most probable to the less one. + Act like a list(iterable) but does not implements all related methods. + """ + + def __init__(self, results: Optional[List[CharsetMatch]] = None): + self._results: List[CharsetMatch] = sorted(results) if results else [] + + def __iter__(self) -> Iterator[CharsetMatch]: + yield from self._results + + def __getitem__(self, item: Union[int, str]) -> CharsetMatch: + """ + Retrieve a single item either by its position or encoding name (alias may be used here). + Raise KeyError upon invalid index or encoding not present in results. + """ + if isinstance(item, int): + return self._results[item] + if isinstance(item, str): + item = iana_name(item, False) + for result in self._results: + if item in result.could_be_from_charset: + return result + raise KeyError + + def __len__(self) -> int: + return len(self._results) + + def __bool__(self) -> bool: + return len(self._results) > 0 + + def append(self, item: CharsetMatch) -> None: + """ + Insert a single match. Will be inserted accordingly to preserve sort. + Can be inserted as a submatch. + """ + if not isinstance(item, CharsetMatch): + raise ValueError( + "Cannot append instance '{}' to CharsetMatches".format( + str(item.__class__) + ) + ) + # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) + if len(item.raw) <= TOO_BIG_SEQUENCE: + for match in self._results: + if match.fingerprint == item.fingerprint and match.chaos == item.chaos: + match.add_submatch(item) + return + self._results.append(item) + self._results = sorted(self._results) + + def best(self) -> Optional["CharsetMatch"]: + """ + Simply return the first match. Strict equivalent to matches[0]. + """ + if not self._results: + return None + return self._results[0] + + def first(self) -> Optional["CharsetMatch"]: + """ + Redundant method, call the method best(). Kept for BC reasons. 
+ """ + return self.best() + + +CoherenceMatch = Tuple[str, float] +CoherenceMatches = List[CoherenceMatch] + + +class CliDetectionResult: + def __init__( + self, + path: str, + encoding: Optional[str], + encoding_aliases: List[str], + alternative_encodings: List[str], + language: str, + alphabets: List[str], + has_sig_or_bom: bool, + chaos: float, + coherence: float, + unicode_path: Optional[str], + is_preferred: bool, + ): + self.path: str = path + self.unicode_path: Optional[str] = unicode_path + self.encoding: Optional[str] = encoding + self.encoding_aliases: List[str] = encoding_aliases + self.alternative_encodings: List[str] = alternative_encodings + self.language: str = language + self.alphabets: List[str] = alphabets + self.has_sig_or_bom: bool = has_sig_or_bom + self.chaos: float = chaos + self.coherence: float = coherence + self.is_preferred: bool = is_preferred + + @property + def __dict__(self) -> Dict[str, Any]: # type: ignore + return { + "path": self.path, + "encoding": self.encoding, + "encoding_aliases": self.encoding_aliases, + "alternative_encodings": self.alternative_encodings, + "language": self.language, + "alphabets": self.alphabets, + "has_sig_or_bom": self.has_sig_or_bom, + "chaos": self.chaos, + "coherence": self.coherence, + "unicode_path": self.unicode_path, + "is_preferred": self.is_preferred, + } + + def to_json(self) -> str: + return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/py.typed b/venv/lib/python3.10/site-packages/charset_normalizer/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/utils.py b/venv/lib/python3.10/site-packages/charset_normalizer/utils.py new file mode 100644 index 0000000..76eafc6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer/utils.py @@ -0,0 +1,414 @@ +import importlib +import logging +import unicodedata +from codecs import IncrementalDecoder +from encodings.aliases import aliases +from functools import lru_cache +from re import findall +from typing import Generator, List, Optional, Set, Tuple, Union + +from _multibytecodec import MultibyteIncrementalDecoder + +from .constant import ( + ENCODING_MARKS, + IANA_SUPPORTED_SIMILAR, + RE_POSSIBLE_ENCODING_INDICATION, + UNICODE_RANGES_COMBINED, + UNICODE_SECONDARY_RANGE_KEYWORD, + UTF8_MAXIMAL_ALLOCATION, +) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_accentuated(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: + return False + return ( + "WITH GRAVE" in description + or "WITH ACUTE" in description + or "WITH CEDILLA" in description + or "WITH DIAERESIS" in description + or "WITH CIRCUMFLEX" in description + or "WITH TILDE" in description + ) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def remove_accent(character: str) -> str: + decomposed: str = unicodedata.decomposition(character) + if not decomposed: + return character + + codes: List[str] = decomposed.split(" ") + + return chr(int(codes[0], 16)) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def unicode_range(character: str) -> Optional[str]: + """ + Retrieve the Unicode range official name from a single character. 
+ """ + character_ord: int = ord(character) + + for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): + if character_ord in ord_range: + return range_name + + return None + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_latin(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: + return False + return "LATIN" in description + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_ascii(character: str) -> bool: + try: + character.encode("ascii") + except UnicodeEncodeError: + return False + return True + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_punctuation(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "P" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Punctuation" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_symbol(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "S" in character_category or "N" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Forms" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_emoticon(character: str) -> bool: + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Emoticons" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_separator(character: str) -> bool: + if character.isspace() or character in {"|", "+", ",", ";", "<", ">"}: + return True + + character_category: str = unicodedata.category(character) + + return "Z" in character_category + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_case_variable(character: str) -> bool: + return character.islower() != character.isupper() + + +def is_private_use_only(character: str) -> bool: + character_category: str = unicodedata.category(character) + + return character_category == "Co" + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "CJK" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hiragana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HIRAGANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_katakana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "KATAKANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hangul(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HANGUL" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_thai(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "THAI" in character_name + + +@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) +def is_unicode_range_secondary(range_name: str) -> bool: + return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_unprintable(character: str) -> bool: + return ( + character.isspace() is False # includes \n \t \r \v 
+ and character.isprintable() is False + and character != "\x1A" # Why? Its the ASCII substitute character. + and character != "\ufeff" # bug discovered in Python, + # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. + ) + + +def any_specified_encoding(sequence: bytes, search_zone: int = 4096) -> Optional[str]: + """ + Extract using ASCII-only decoder any specified encoding in the first n-bytes. + """ + if not isinstance(sequence, bytes): + raise TypeError + + seq_len: int = len(sequence) + + results: List[str] = findall( + RE_POSSIBLE_ENCODING_INDICATION, + sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), + ) + + if len(results) == 0: + return None + + for specified_encoding in results: + specified_encoding = specified_encoding.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if encoding_alias == specified_encoding: + return encoding_iana + if encoding_iana == specified_encoding: + return encoding_iana + + return None + + +@lru_cache(maxsize=128) +def is_multi_byte_encoding(name: str) -> bool: + """ + Verify is a specific encoding is a multi byte one based on it IANA name + """ + return name in { + "utf_8", + "utf_8_sig", + "utf_16", + "utf_16_be", + "utf_16_le", + "utf_32", + "utf_32_le", + "utf_32_be", + "utf_7", + } or issubclass( + importlib.import_module("encodings.{}".format(name)).IncrementalDecoder, + MultibyteIncrementalDecoder, + ) + + +def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]: + """ + Identify and extract SIG/BOM in given sequence. + """ + + for iana_encoding in ENCODING_MARKS: + marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding] + + if isinstance(marks, bytes): + marks = [marks] + + for mark in marks: + if sequence.startswith(mark): + return iana_encoding, mark + + return None, b"" + + +def should_strip_sig_or_bom(iana_encoding: str) -> bool: + return iana_encoding not in {"utf_16", "utf_32"} + + +def iana_name(cp_name: str, strict: bool = True) -> str: + cp_name = cp_name.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if cp_name in [encoding_alias, encoding_iana]: + return encoding_iana + + if strict: + raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) + + return cp_name + + +def range_scan(decoded_sequence: str) -> List[str]: + ranges: Set[str] = set() + + for character in decoded_sequence: + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + continue + + ranges.add(character_range) + + return list(ranges) + + +def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: + if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): + return 0.0 + + decoder_a = importlib.import_module( + "encodings.{}".format(iana_name_a) + ).IncrementalDecoder + decoder_b = importlib.import_module( + "encodings.{}".format(iana_name_b) + ).IncrementalDecoder + + id_a: IncrementalDecoder = decoder_a(errors="ignore") + id_b: IncrementalDecoder = decoder_b(errors="ignore") + + character_match_count: int = 0 + + for i in range(255): + to_be_decoded: bytes = bytes([i]) + if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): + character_match_count += 1 + + return character_match_count / 254 + + +def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: + """ + Determine if two code page are at least 80% similar. 
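any_specified_encoding() above pairs RE_POSSIBLE_ENCODING_INDICATION from constant.py with the stdlib aliases table, and identify_sig_or_bom() walks the ENCODING_MARKS table, which is not part of this hunk, so the exact BOM tuple below is an assumption about its content:

from charset_normalizer.utils import any_specified_encoding, identify_sig_or_bom

print(any_specified_encoding(b'<meta charset="utf-8">'))     # utf_8
print(any_specified_encoding(b"# -*- coding: latin-1 -*-"))  # latin_1
# assumed to yield ("utf_8", b"\xef\xbb\xbf") given a standard UTF-8 BOM entry in ENCODING_MARKS
print(identify_sig_or_bom(b"\xef\xbb\xbfpayload"))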
IANA_SUPPORTED_SIMILAR dict was generated using + the function cp_similarity. + """ + return ( + iana_name_a in IANA_SUPPORTED_SIMILAR + and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] + ) + + +def set_logging_handler( + name: str = "charset_normalizer", + level: int = logging.INFO, + format_string: str = "%(asctime)s | %(levelname)s | %(message)s", +) -> None: + logger = logging.getLogger(name) + logger.setLevel(level) + + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(format_string)) + logger.addHandler(handler) + + +def cut_sequence_chunks( + sequences: bytes, + encoding_iana: str, + offsets: range, + chunk_size: int, + bom_or_sig_available: bool, + strip_sig_or_bom: bool, + sig_payload: bytes, + is_multi_byte_decoder: bool, + decoded_payload: Optional[str] = None, +) -> Generator[str, None, None]: + if decoded_payload and is_multi_byte_decoder is False: + for i in offsets: + chunk = decoded_payload[i : i + chunk_size] + if not chunk: + break + yield chunk + else: + for i in offsets: + chunk_end = i + chunk_size + if chunk_end > len(sequences) + 8: + continue + + cut_sequence = sequences[i : i + chunk_size] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode( + encoding_iana, + errors="ignore" if is_multi_byte_decoder else "strict", + ) + + # multi-byte bad cutting detector and adjustment + # not the cleanest way to perform that fix but clever enough for now. + if is_multi_byte_decoder and i > 0: + chunk_partial_size_chk: int = min(chunk_size, 16) + + if ( + decoded_payload + and chunk[:chunk_partial_size_chk] not in decoded_payload + ): + for j in range(i, i - 4, -1): + cut_sequence = sequences[j:chunk_end] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode(encoding_iana, errors="ignore") + + if chunk[:chunk_partial_size_chk] in decoded_payload: + break + + yield chunk diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/version.py b/venv/lib/python3.10/site-packages/charset_normalizer/version.py new file mode 100644 index 0000000..b74c264 --- /dev/null +++ b/venv/lib/python3.10/site-packages/charset_normalizer/version.py @@ -0,0 +1,6 @@ +""" +Expose version +""" + +__version__ = "3.1.0" +VERSION = __version__.split(".") diff --git a/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/LICENSE.rst b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/LICENSE.rst new file mode 100644 index 0000000..d12a849 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2014 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/METADATA b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/METADATA new file mode 100644 index 0000000..8e5dc1e --- /dev/null +++ b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/METADATA @@ -0,0 +1,111 @@ +Metadata-Version: 2.1 +Name: click +Version: 8.1.3 +Summary: Composable command line interface toolkit +Home-page: https://palletsprojects.com/p/click/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Changes, https://click.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/click/ +Project-URL: Issue Tracker, https://github.com/pallets/click/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: colorama ; platform_system == "Windows" +Requires-Dist: importlib-metadata ; python_version < "3.8" + +\$ click\_ +========== + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. + +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Supports lazy loading of subcommands at runtime + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U click + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. 
code-block:: python + + import click + + @click.command() + @click.option("--count", default=1, help="Number of greetings.") + @click.option("--name", prompt="Your name", help="The person to greet.") + def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo(f"Hello, {name}!") + + if __name__ == '__main__': + hello() + +.. code-block:: text + + $ python hello.py --count=3 + Your name: Click + Hello, Click! + Hello, Click! + Hello, Click! + + +Donate +------ + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://click.palletsprojects.com/ +- Changes: https://click.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/click/ +- Source Code: https://github.com/pallets/click +- Issue Tracker: https://github.com/pallets/click/issues +- Website: https://palletsprojects.com/p/click +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/RECORD b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/RECORD new file mode 100644 index 0000000..67ff862 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/RECORD @@ -0,0 +1,39 @@ +click-8.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click-8.1.3.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click-8.1.3.dist-info/METADATA,sha256=tFJIX5lOjx7c5LjZbdTPFVDJSgyv9F74XY0XCPp_gnc,3247 +click-8.1.3.dist-info/RECORD,, +click-8.1.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +click-8.1.3.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 +click/__init__.py,sha256=rQBLutqg-z6m8nOzivIfigDn_emijB_dKv9BZ2FNi5s,3138 +click/__pycache__/__init__.cpython-310.pyc,, +click/__pycache__/_compat.cpython-310.pyc,, +click/__pycache__/_termui_impl.cpython-310.pyc,, +click/__pycache__/_textwrap.cpython-310.pyc,, +click/__pycache__/_winconsole.cpython-310.pyc,, +click/__pycache__/core.cpython-310.pyc,, +click/__pycache__/decorators.cpython-310.pyc,, +click/__pycache__/exceptions.cpython-310.pyc,, +click/__pycache__/formatting.cpython-310.pyc,, +click/__pycache__/globals.cpython-310.pyc,, +click/__pycache__/parser.cpython-310.pyc,, +click/__pycache__/shell_completion.cpython-310.pyc,, +click/__pycache__/termui.cpython-310.pyc,, +click/__pycache__/testing.cpython-310.pyc,, +click/__pycache__/types.cpython-310.pyc,, +click/__pycache__/utils.cpython-310.pyc,, +click/_compat.py,sha256=JIHLYs7Jzz4KT9t-ds4o4jBzLjnwCiJQKqur-5iwCKI,18810 +click/_termui_impl.py,sha256=qK6Cfy4mRFxvxE8dya8RBhLpSC8HjF-lvBc6aNrPdwg,23451 +click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 +click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 +click/core.py,sha256=mz87bYEKzIoNYEa56BFAiOJnvt1Y0L-i7wD4_ZecieE,112782 +click/decorators.py,sha256=yo3zvzgUm5q7h5CXjyV6q3h_PJAiUaem178zXwdWUFI,16350 +click/exceptions.py,sha256=7gDaLGuFZBeCNwY9ERMsF2-Z3R9Fvq09Zc6IZSKjseo,9167 +click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 +click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961 
+click/parser.py,sha256=cAEt1uQR8gq3-S9ysqbVU-fdAZNvilxw4ReJ_T1OQMk,19044 +click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +click/shell_completion.py,sha256=qOp_BeC9esEOSZKyu5G7RIxEUaLsXUX-mTb7hB1r4QY,18018 +click/termui.py,sha256=ACBQVOvFCTSqtD5VREeCAdRtlHd-Imla-Lte4wSfMjA,28355 +click/testing.py,sha256=ptpMYgRY7dVfE3UDgkgwayu9ePw98sQI3D7zZXiCpj4,16063 +click/types.py,sha256=rEb1aZSQKq3ciCMmjpG2Uva9vk498XRL7ThrcK2GRss,35805 +click/utils.py,sha256=33D6E7poH_nrKB-xr-UyDEXnxOcCiQqxuRLtrqeVv6o,18682 diff --git a/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/WHEEL b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/top_level.txt new file mode 100644 index 0000000..dca9a90 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click-8.1.3.dist-info/top_level.txt @@ -0,0 +1 @@ +click diff --git a/venv/lib/python3.10/site-packages/click/__init__.py b/venv/lib/python3.10/site-packages/click/__init__.py new file mode 100644 index 0000000..e3ef423 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/__init__.py @@ -0,0 +1,73 @@ +""" +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. +""" +from .core import Argument as Argument +from .core import BaseCommand as BaseCommand +from .core import Command as Command +from .core import CommandCollection as CommandCollection +from .core import Context as Context +from .core import Group as Group +from .core import MultiCommand as MultiCommand +from .core import Option as Option +from .core import Parameter as Parameter +from .decorators import argument as argument +from .decorators import command as command +from .decorators import confirmation_option as confirmation_option +from .decorators import group as group +from .decorators import help_option as help_option +from .decorators import make_pass_decorator as make_pass_decorator +from .decorators import option as option +from .decorators import pass_context as pass_context +from .decorators import pass_obj as pass_obj +from .decorators import password_option as password_option +from .decorators import version_option as version_option +from .exceptions import Abort as Abort +from .exceptions import BadArgumentUsage as BadArgumentUsage +from .exceptions import BadOptionUsage as BadOptionUsage +from .exceptions import BadParameter as BadParameter +from .exceptions import ClickException as ClickException +from .exceptions import FileError as FileError +from .exceptions import MissingParameter as MissingParameter +from .exceptions import NoSuchOption as NoSuchOption +from .exceptions import UsageError as UsageError +from .formatting import HelpFormatter as HelpFormatter +from .formatting import wrap_text as wrap_text +from .globals import get_current_context as get_current_context +from .parser import OptionParser as OptionParser +from .termui import clear as clear +from .termui import confirm as confirm +from .termui import echo_via_pager as echo_via_pager +from .termui import edit as edit +from .termui import getchar as 
getchar +from .termui import launch as launch +from .termui import pause as pause +from .termui import progressbar as progressbar +from .termui import prompt as prompt +from .termui import secho as secho +from .termui import style as style +from .termui import unstyle as unstyle +from .types import BOOL as BOOL +from .types import Choice as Choice +from .types import DateTime as DateTime +from .types import File as File +from .types import FLOAT as FLOAT +from .types import FloatRange as FloatRange +from .types import INT as INT +from .types import IntRange as IntRange +from .types import ParamType as ParamType +from .types import Path as Path +from .types import STRING as STRING +from .types import Tuple as Tuple +from .types import UNPROCESSED as UNPROCESSED +from .types import UUID as UUID +from .utils import echo as echo +from .utils import format_filename as format_filename +from .utils import get_app_dir as get_app_dir +from .utils import get_binary_stream as get_binary_stream +from .utils import get_text_stream as get_text_stream +from .utils import open_file as open_file + +__version__ = "8.1.3" diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..82d24f3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/_compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/_compat.cpython-310.pyc new file mode 100644 index 0000000..ac10da5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/_compat.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/_termui_impl.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/_termui_impl.cpython-310.pyc new file mode 100644 index 0000000..fbddf0d Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/_termui_impl.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/_textwrap.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/_textwrap.cpython-310.pyc new file mode 100644 index 0000000..c8cde40 Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/_textwrap.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/_winconsole.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/_winconsole.cpython-310.pyc new file mode 100644 index 0000000..f2dabeb Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/_winconsole.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/core.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/core.cpython-310.pyc new file mode 100644 index 0000000..379fe7f Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/core.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/decorators.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/decorators.cpython-310.pyc new file mode 100644 index 0000000..0f6e91a Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/decorators.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/exceptions.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/click/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000..2328914 Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/formatting.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/formatting.cpython-310.pyc new file mode 100644 index 0000000..f15939f Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/formatting.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/globals.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/globals.cpython-310.pyc new file mode 100644 index 0000000..e772d6d Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/globals.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/parser.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/parser.cpython-310.pyc new file mode 100644 index 0000000..cb8463b Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/parser.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/shell_completion.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/shell_completion.cpython-310.pyc new file mode 100644 index 0000000..9966a59 Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/shell_completion.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/termui.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/termui.cpython-310.pyc new file mode 100644 index 0000000..d0aea47 Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/termui.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/testing.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/testing.cpython-310.pyc new file mode 100644 index 0000000..dbef884 Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/testing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/types.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/types.cpython-310.pyc new file mode 100644 index 0000000..c3b5c88 Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/types.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000..699df4a Binary files /dev/null and b/venv/lib/python3.10/site-packages/click/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click/_compat.py b/venv/lib/python3.10/site-packages/click/_compat.py new file mode 100644 index 0000000..766d286 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/_compat.py @@ -0,0 +1,626 @@ +import codecs +import io +import os +import re +import sys +import typing as t +from weakref import WeakKeyDictionary + +CYGWIN = sys.platform.startswith("cygwin") +MSYS2 = sys.platform.startswith("win") and ("GCC" in sys.version) +# Determine local App Engine environment, per Google's own suggestion +APP_ENGINE = "APPENGINE_RUNTIME" in os.environ and "Development/" in os.environ.get( + "SERVER_SOFTWARE", "" +) +WIN = sys.platform.startswith("win") and not 
APP_ENGINE and not MSYS2 +auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None +_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") + + +def get_filesystem_encoding() -> str: + return sys.getfilesystemencoding() or sys.getdefaultencoding() + + +def _make_text_stream( + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = "replace" + return _NonClosingTextIOWrapper( + stream, + encoding, + errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def is_ascii_encoding(encoding: str) -> bool: + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +def get_best_encoding(stream: t.IO) -> str: + """Returns the default stream encoding if not found.""" + rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return "utf-8" + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + def __init__( + self, + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, + **extra: t.Any, + ) -> None: + self._stream = stream = t.cast( + t.BinaryIO, _FixupStream(stream, force_readable, force_writable) + ) + super().__init__(stream, encoding, errors, **extra) + + def __del__(self) -> None: + try: + self.detach() + except Exception: + pass + + def isatty(self) -> bool: + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream: + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags is there because some tools + put badly patched objects on sys (one such offender being certain versions + of jupyter notebook).
+ """ + + def __init__( + self, + stream: t.BinaryIO, + force_readable: bool = False, + force_writable: bool = False, + ): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._stream, name) + + def read1(self, size: int) -> bytes: + f = getattr(self._stream, "read1", None) + + if f is not None: + return t.cast(bytes, f(size)) + + return self._stream.read(size) + + def readable(self) -> bool: + if self._force_readable: + return True + x = getattr(self._stream, "readable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self) -> bool: + if self._force_writable: + return True + x = getattr(self._stream, "writable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.write("") # type: ignore + except Exception: + try: + self._stream.write(b"") + except Exception: + return False + return True + + def seekable(self) -> bool: + x = getattr(self._stream, "seekable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +def _is_binary_reader(stream: t.IO, default: bool = False) -> bool: + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. + + +def _is_binary_writer(stream: t.IO, default: bool = False) -> bool: + try: + stream.write(b"") + except Exception: + try: + stream.write("") + return False + except Exception: + pass + return default + return True + + +def _find_binary_reader(stream: t.IO) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _find_binary_writer(stream: t.IO) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_writer(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _stream_is_misconfigured(stream: t.TextIO) -> bool: + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. 
+ return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") + + +def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: + """A stream attribute is compatible if it is equal to the + desired value or the desired value is unset and the attribute + has a value. + """ + stream_value = getattr(stream, attr, None) + return stream_value == value or (value is None and stream_value is not None) + + +def _is_compatible_text_stream( + stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> bool: + """Check if a stream's encoding and errors attributes are + compatible with the desired values. + """ + return _is_compat_stream_attr( + stream, "encoding", encoding + ) and _is_compat_stream_attr(stream, "errors", errors) + + +def _force_correct_text_stream( + text_stream: t.IO, + encoding: t.Optional[str], + errors: t.Optional[str], + is_binary: t.Callable[[t.IO, bool], bool], + find_binary: t.Callable[[t.IO], t.Optional[t.BinaryIO]], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if is_binary(text_stream, False): + binary_reader = t.cast(t.BinaryIO, text_stream) + else: + text_stream = t.cast(t.TextIO, text_stream) + # If the stream looks compatible, and won't default to a + # misconfigured ascii encoding, return it as-is. + if _is_compatible_text_stream(text_stream, encoding, errors) and not ( + encoding is None and _stream_is_misconfigured(text_stream) + ): + return text_stream + + # Otherwise, get the underlying binary reader. + possible_binary_reader = find_binary(text_stream) + + # If that's not possible, silently use the original reader + # and get mojibake instead of exceptions. + if possible_binary_reader is None: + return text_stream + + binary_reader = possible_binary_reader + + # Default errors to replace instead of strict in order to get + # something that works. + if errors is None: + errors = "replace" + + # Wrap the binary stream in a text stream with the correct + # encoding parameters. 
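What "forcing Click to recover" means in practice can be sketched outside Click as well (hedged: this mirrors the idea above, not the exact code path). Given a text stream that reports an ASCII or missing encoding, the fix is to drop down to its binary buffer and rewrap it with a forgiving configuration:

.. code-block:: python

    import io

    def rewrap_if_ascii(stream):
        # An ASCII (or absent) encoding is treated as misconfigured.
        encoding = getattr(stream, "encoding", None) or "ascii"
        if encoding.lower() != "ascii":
            return stream
        buffer = getattr(stream, "buffer", None)
        if buffer is None:
            return stream  # no binary layer to rewrap; keep the original
        return io.TextIOWrapper(buffer, encoding="utf-8", errors="replace")

    raw = io.BytesIO()
    bad = io.TextIOWrapper(raw, encoding="ascii")
    good = rewrap_if_ascii(bad)
    good.write("snowman: \u2603")  # would raise UnicodeEncodeError on `bad`
    good.flush()
    print(raw.getvalue())  # b'snowman: \xe2\x98\x83'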
+ return _make_text_stream( + binary_reader, + encoding, + errors, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def _force_correct_text_reader( + text_reader: t.IO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_reader, + encoding, + errors, + _is_binary_reader, + _find_binary_reader, + force_readable=force_readable, + ) + + +def _force_correct_text_writer( + text_writer: t.IO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_writable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_writer, + encoding, + errors, + _is_binary_writer, + _find_binary_writer, + force_writable=force_writable, + ) + + +def get_binary_stdin() -> t.BinaryIO: + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdin.") + return reader + + +def get_binary_stdout() -> t.BinaryIO: + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdout.") + return writer + + +def get_binary_stderr() -> t.BinaryIO: + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stderr.") + return writer + + +def get_text_stdin( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) + + +def get_text_stdout( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) + + +def get_text_stderr( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) + + +def _wrap_io_open( + file: t.Union[str, os.PathLike, int], + mode: str, + encoding: t.Optional[str], + errors: t.Optional[str], +) -> t.IO: + """Handles not passing ``encoding`` and ``errors`` in binary mode.""" + if "b" in mode: + return open(file, mode) + + return open(file, mode, encoding=encoding, errors=errors) + + +def open_stream( + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, +) -> t.Tuple[t.IO, bool]: + binary = "b" in mode + + # Standard streams first. These are simple because they ignore the + # atomic flag. Use fsdecode to handle Path("-"). + if os.fsdecode(filename) == "-": + if any(m in mode for m in ["w", "a", "x"]): + if binary: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if binary: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. 
+ if not atomic: + return _wrap_io_open(filename, mode, encoding, errors), True + + # Some usability stuff for atomic writes + if "a" in mode: + raise ValueError( + "Appending to an existing file is not supported, because that" + " would involve an expensive `copy`-operation to a temporary" + " file. Open the file in normal `w`-mode and copy explicitly" + " if that's what you're after." + ) + if "x" in mode: + raise ValueError("Use the `overwrite`-parameter instead.") + if "w" not in mode: + raise ValueError("Atomic writes only make sense with `w`-mode.") + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. + import errno + import random + + try: + perm: t.Optional[int] = os.stat(filename).st_mode + except OSError: + perm = None + + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + + if binary: + flags |= getattr(os, "O_BINARY", 0) + + while True: + tmp_filename = os.path.join( + os.path.dirname(filename), + f".__atomic-write{random.randrange(1 << 32):08x}", + ) + try: + fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) + break + except OSError as e: + if e.errno == errno.EEXIST or ( + os.name == "nt" + and e.errno == errno.EACCES + and os.path.isdir(e.filename) + and os.access(e.filename, os.W_OK) + ): + continue + raise + + if perm is not None: + os.chmod(tmp_filename, perm) # in case perm includes bits in umask + + f = _wrap_io_open(fd, mode, encoding, errors) + af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) + return t.cast(t.IO, af), True + + +class _AtomicFile: + def __init__(self, f: t.IO, tmp_filename: str, real_filename: str) -> None: + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self) -> str: + return self._real_filename + + def close(self, delete: bool = False) -> None: + if self.closed: + return + self._f.close() + os.replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._f, name) + + def __enter__(self) -> "_AtomicFile": + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self.close(delete=exc_type is not None) + + def __repr__(self) -> str: + return repr(self._f) + + +def strip_ansi(value: str) -> str: + return _ansi_re.sub("", value) + + +def _is_jupyter_kernel_output(stream: t.IO) -> bool: + while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): + stream = stream._stream + + return stream.__class__.__module__.startswith("ipykernel.") + + +def should_strip_ansi( + stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None +) -> bool: + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) and not _is_jupyter_kernel_output(stream) + return not color + + +# On Windows, wrap the output streams with colorama to support ANSI +# color codes. 
+# NOTE: double check is needed so mypy does not analyze this on Linux +if sys.platform.startswith("win") and WIN: + from ._winconsole import _get_windows_console_stream + + def _get_argv_encoding() -> str: + import locale + + return locale.getpreferredencoding() + + _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def auto_wrap_for_ansi( + stream: t.TextIO, color: t.Optional[bool] = None + ) -> t.TextIO: + """Support ANSI color and style codes on Windows by wrapping a + stream with colorama. + """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + + if cached is not None: + return cached + + import colorama + + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = t.cast(t.TextIO, ansi_wrapper.stream) + _write = rv.write + + def _safe_write(s): + try: + return _write(s) + except BaseException: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write + + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + + return rv + +else: + + def _get_argv_encoding() -> str: + return getattr(sys.stdin, "encoding", None) or get_filesystem_encoding() + + def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] + ) -> t.Optional[t.TextIO]: + return None + + +def term_len(x: str) -> int: + return len(strip_ansi(x)) + + +def isatty(stream: t.IO) -> bool: + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func( + src_func: t.Callable[[], t.TextIO], wrapper_func: t.Callable[[], t.TextIO] +) -> t.Callable[[], t.TextIO]: + cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def func() -> t.TextIO: + stream = src_func() + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + cache[stream] = rv + except Exception: + pass + return rv + + return func + + +_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) + + +binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { + "stdin": get_binary_stdin, + "stdout": get_binary_stdout, + "stderr": get_binary_stderr, +} + +text_streams: t.Mapping[ + str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] +] = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} diff --git a/venv/lib/python3.10/site-packages/click/_termui_impl.py b/venv/lib/python3.10/site-packages/click/_termui_impl.py new file mode 100644 index 0000000..4b979bc --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/_termui_impl.py @@ -0,0 +1,717 @@ +""" +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. 
+""" +import contextlib +import math +import os +import sys +import time +import typing as t +from gettext import gettext as _ + +from ._compat import _default_text_stdout +from ._compat import CYGWIN +from ._compat import get_best_encoding +from ._compat import isatty +from ._compat import open_stream +from ._compat import strip_ansi +from ._compat import term_len +from ._compat import WIN +from .exceptions import ClickException +from .utils import echo + +V = t.TypeVar("V") + +if os.name == "nt": + BEFORE_BAR = "\r" + AFTER_BAR = "\n" +else: + BEFORE_BAR = "\r\033[?25l" + AFTER_BAR = "\033[?25h\n" + + +class ProgressBar(t.Generic[V]): + def __init__( + self, + iterable: t.Optional[t.Iterable[V]], + length: t.Optional[int] = None, + fill_char: str = "#", + empty_char: str = " ", + bar_template: str = "%(bar)s", + info_sep: str = " ", + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + label: t.Optional[str] = None, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, + width: int = 30, + ) -> None: + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label = label or "" + if file is None: + file = _default_text_stdout() + self.file = file + self.color = color + self.update_min_steps = update_min_steps + self._completed_intervals = 0 + self.width = width + self.autowidth = width == 0 + + if length is None: + from operator import length_hint + + length = length_hint(iterable, -1) + + if length == -1: + length = None + if iterable is None: + if length is None: + raise TypeError("iterable or length is required") + iterable = t.cast(t.Iterable[V], range(length)) + self.iter = iter(iterable) + self.length = length + self.pos = 0 + self.avg: t.List[float] = [] + self.start = self.last_eta = time.time() + self.eta_known = False + self.finished = False + self.max_width: t.Optional[int] = None + self.entered = False + self.current_item: t.Optional[V] = None + self.is_hidden = not isatty(self.file) + self._last_line: t.Optional[str] = None + + def __enter__(self) -> "ProgressBar": + self.entered = True + self.render_progress() + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self.render_finish() + + def __iter__(self) -> t.Iterator[V]: + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + self.render_progress() + return self.generator() + + def __next__(self) -> V: + # Iteration is defined in terms of a generator function, + # returned by iter(self); use that to define next(). This works + # because `self.iter` is an iterable consumed by that generator, + # so it is re-entry safe. Calling `next(self.generator())` + # twice works and does "what you want". 
+ return next(iter(self)) + + def render_finish(self) -> None: + if self.is_hidden: + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self) -> float: + if self.finished: + return 1.0 + return min(self.pos / (float(self.length or 1) or 1), 1.0) + + @property + def time_per_iteration(self) -> float: + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self) -> float: + if self.length is not None and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self) -> str: + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" + else: + return f"{hours:02}:{minutes:02}:{seconds:02}" + return "" + + def format_pos(self) -> str: + pos = str(self.pos) + if self.length is not None: + pos += f"/{self.length}" + return pos + + def format_pct(self) -> str: + return f"{int(self.pct * 100): 4}%"[1:] + + def format_bar(self) -> str: + if self.length is not None: + bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + chars = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + chars[ + int( + (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) + * self.width + ) + ] = self.fill_char + bar = "".join(chars) + return bar + + def format_progress_line(self) -> str: + show_percent = self.show_percent + + info_bits = [] + if self.length is not None and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return ( + self.bar_template + % { + "label": self.label, + "bar": self.format_bar(), + "info": self.info_sep.join(info_bits), + } + ).rstrip() + + def render_progress(self) -> None: + import shutil + + if self.is_hidden: + # Only output the label as it changes if the output is not a + # TTY. Use file=stderr if you expect to be piping stdout. + if self._last_line != self.label: + self._last_line = self.label + echo(self.label, file=self.file, color=self.color) + + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, shutil.get_terminal_size().columns - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(" " * self.max_width) # type: ignore + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(" " * (clear_width - line_len)) + line = "".join(buf) + # Render the line only if it changed. 
+ + if line != self._last_line: + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps: int) -> None: + self.pos += n_steps + if self.length is not None and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length is not None + + def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None: + """Update the progress bar by advancing a specified number of + steps, and optionally set the ``current_item`` for this new + position. + + :param n_steps: Number of steps to advance. + :param current_item: Optional item to set as ``current_item`` + for the updated position. + + .. versionchanged:: 8.0 + Added the ``current_item`` optional parameter. + + .. versionchanged:: 8.0 + Only render when the number of steps meets the + ``update_min_steps`` threshold. + """ + if current_item is not None: + self.current_item = current_item + + self._completed_intervals += n_steps + + if self._completed_intervals >= self.update_min_steps: + self.make_step(self._completed_intervals) + self.render_progress() + self._completed_intervals = 0 + + def finish(self) -> None: + self.eta_known = False + self.current_item = None + self.finished = True + + def generator(self) -> t.Iterator[V]: + """Return a generator which yields the items added to the bar + during construction, and updates the progress bar *after* the + yielded block returns. + """ + # WARNING: the iterator interface for `ProgressBar` relies on + # this and only works because this is a simple generator which + # doesn't create or manage additional state. If this function + # changes, the impact should be evaluated both against + # `iter(bar)` and `next(bar)`. `next()` in particular may call + # `self.generator()` repeatedly, and this must remain safe in + # order for that interface to work. + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + + if self.is_hidden: + yield from self.iter + else: + for rv in self.iter: + self.current_item = rv + + # This allows show_item_func to be updated before the + # item is processed. Only trigger at the beginning of + # the update interval. 
+ if self._completed_intervals == 0: + self.render_progress() + + yield rv + self.update(1) + + self.finish() + self.render_progress() + + +def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None: + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + if not isatty(sys.stdin) or not isatty(stdout): + return _nullpager(stdout, generator, color) + pager_cmd = (os.environ.get("PAGER", None) or "").strip() + if pager_cmd: + if WIN: + return _tempfilepager(generator, pager_cmd, color) + return _pipepager(generator, pager_cmd, color) + if os.environ.get("TERM") in ("dumb", "emacs"): + return _nullpager(stdout, generator, color) + if WIN or sys.platform.startswith("os2"): + return _tempfilepager(generator, "more <", color) + if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: + return _pipepager(generator, "less", color) + + import tempfile + + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + if hasattr(os, "system") and os.system(f'more "{filename}"') == 0: + return _pipepager(generator, "more", color) + return _nullpager(stdout, generator, color) + finally: + os.unlink(filename) + + +def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None: + """Page through text by feeding it to another program. Invoking a + pager through this might support colors. + """ + import subprocess + + env = dict(os.environ) + + # If we're piping to less we might support colors under the + # condition that + cmd_detail = cmd.rsplit("/", 1)[-1].split() + if color is None and cmd_detail[0] == "less": + less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}" + if not less_flags: + env["LESS"] = "-R" + color = True + elif "r" in less_flags or "R" in less_flags: + color = True + + c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) + stdin = t.cast(t.BinaryIO, c.stdin) + encoding = get_best_encoding(stdin) + try: + for text in generator: + if not color: + text = strip_ansi(text) + + stdin.write(text.encode(encoding, "replace")) + except (OSError, KeyboardInterrupt): + pass + else: + stdin.close() + + # Less doesn't respect ^C, but catches it for its own UI purposes (aborting + # search or other commands inside less). + # + # That means when the user hits ^C, the parent process (click) terminates, + # but less is still alive, paging the output and messing up the terminal. + # + # If the user wants to make the pager exit on ^C, they should set + # `LESS='-K'`. It's not our decision to make. + while True: + try: + c.wait() + except KeyboardInterrupt: + pass + else: + break + + +def _tempfilepager( + generator: t.Iterable[str], cmd: str, color: t.Optional[bool] +) -> None: + """Page through text by invoking a program on a temporary file.""" + import tempfile + + fd, filename = tempfile.mkstemp() + # TODO: This never terminates if the passed generator never terminates. + text = "".join(generator) + if not color: + text = strip_ansi(text) + encoding = get_best_encoding(sys.stdout) + with open_stream(filename, "wb")[0] as f: + f.write(text.encode(encoding)) + try: + os.system(f'{cmd} "{filename}"') + finally: + os.close(fd) + os.unlink(filename) + + +def _nullpager( + stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool] +) -> None: + """Simply print unformatted text. 
This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor: + def __init__( + self, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + ) -> None: + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self) -> str: + if self.editor is not None: + return self.editor + for key in "VISUAL", "EDITOR": + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return "notepad" + for editor in "sensible-editor", "vim", "nano": + if os.system(f"which {editor} >/dev/null 2>&1") == 0: + return editor + return "vi" + + def edit_file(self, filename: str) -> None: + import subprocess + + editor = self.get_editor() + environ: t.Optional[t.Dict[str, str]] = None + + if self.env: + environ = os.environ.copy() + environ.update(self.env) + + try: + c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) + exit_code = c.wait() + if exit_code != 0: + raise ClickException( + _("{editor}: Editing failed").format(editor=editor) + ) + except OSError as e: + raise ClickException( + _("{editor}: Editing failed: {e}").format(editor=editor, e=e) + ) from e + + def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: + import tempfile + + if not text: + data = b"" + elif isinstance(text, (bytes, bytearray)): + data = text + else: + if text and not text.endswith("\n"): + text += "\n" + + if WIN: + data = text.replace("\n", "\r\n").encode("utf-8-sig") + else: + data = text.encode("utf-8") + + fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) + f: t.BinaryIO + + try: + with os.fdopen(fd, "wb") as f: + f.write(data) + + # If the filesystem resolution is 1 second, like Mac OS + # 10.12 Extended, or 2 seconds, like FAT32, and the editor + # closes very fast, require_save can fail. Set the modified + # time to be 2 seconds in the past to work around this. + os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) + # Depending on the resolution, the exact value might not be + # recorded, so get the new recorded value. 
+ timestamp = os.path.getmtime(name) + + self.edit_file(name) + + if self.require_save and os.path.getmtime(name) == timestamp: + return None + + with open(name, "rb") as f: + rv = f.read() + + if isinstance(text, (bytes, bytearray)): + return rv + + return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore + finally: + os.unlink(name) + + +def open_url(url: str, wait: bool = False, locate: bool = False) -> int: + import subprocess + + def _unquote_file(url: str) -> str: + from urllib.parse import unquote + + if url.startswith("file://"): + url = unquote(url[7:]) + + return url + + if sys.platform == "darwin": + args = ["open"] + if wait: + args.append("-W") + if locate: + args.append("-R") + args.append(_unquote_file(url)) + null = open("/dev/null", "w") + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url.replace('"', "")) + args = f'explorer /select,"{url}"' + else: + url = url.replace('"', "") + wait_str = "/WAIT" if wait else "" + args = f'start {wait_str} "" "{url}"' + return os.system(args) + elif CYGWIN: + if locate: + url = os.path.dirname(_unquote_file(url).replace('"', "")) + args = f'cygstart "{url}"' + else: + url = url.replace('"', "") + wait_str = "-w" if wait else "" + args = f'cygstart {wait_str} "{url}"' + return os.system(args) + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or "." + else: + url = _unquote_file(url) + c = subprocess.Popen(["xdg-open", url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(("http://", "https://")) and not locate and not wait: + import webbrowser + + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: + if ch == "\x03": + raise KeyboardInterrupt() + + if ch == "\x04" and not WIN: # Unix-like, Ctrl+D + raise EOFError() + + if ch == "\x1a" and WIN: # Windows, Ctrl+Z + raise EOFError() + + return None + + +if WIN: + import msvcrt + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + yield -1 + + def getchar(echo: bool) -> str: + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. 
Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + func: t.Callable[[], str] + + if echo: + func = msvcrt.getwche # type: ignore + else: + func = msvcrt.getwch # type: ignore + + rv = func() + + if rv in ("\x00", "\xe0"): + # \x00 and \xe0 are control characters that indicate special key, + # see above. + rv += func() + + _translate_ch_to_exc(rv) + return rv + +else: + import tty + import termios + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + f: t.Optional[t.TextIO] + fd: int + + if not isatty(sys.stdin): + f = open("/dev/tty") + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + + try: + old_settings = termios.tcgetattr(fd) + + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo: bool) -> str: + with raw_terminal() as fd: + ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") + + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + + _translate_ch_to_exc(ch) + return ch diff --git a/venv/lib/python3.10/site-packages/click/_textwrap.py b/venv/lib/python3.10/site-packages/click/_textwrap.py new file mode 100644 index 0000000..b47dcbd --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/_textwrap.py @@ -0,0 +1,49 @@ +import textwrap +import typing as t +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + def _handle_long_word( + self, + reversed_chunks: t.List[str], + cur_line: t.List[str], + cur_len: int, + width: int, + ) -> None: + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent: str) -> t.Iterator[None]: + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text: str) -> str: + rv = [] + + for idx, line in enumerate(text.splitlines()): + indent = self.initial_indent + + if idx > 0: + indent = self.subsequent_indent + + rv.append(f"{indent}{line}") + + return "\n".join(rv) diff --git a/venv/lib/python3.10/site-packages/click/_winconsole.py b/venv/lib/python3.10/site-packages/click/_winconsole.py new file mode 100644 index 0000000..6b20df3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/_winconsole.py @@ -0,0 +1,279 @@ +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. +# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prompt. 
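The public wrapper over both platform implementations is ``click.getchar``; a hedged sketch of a simple key loop (behavior is terminal-dependent, and on Windows a special key arrives as the two-character sequence described above):

.. code-block:: python

    import click

    click.echo("Press q to quit, any other key to continue.")
    while True:
        ch = click.getchar()
        if ch == "q":
            break
        click.echo(f"You pressed: {ch!r}")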
+import io +import sys +import time +import typing as t +from ctypes import byref +from ctypes import c_char +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_ssize_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import POINTER +from ctypes import py_object +from ctypes import Structure +from ctypes.wintypes import DWORD +from ctypes.wintypes import HANDLE +from ctypes.wintypes import LPCWSTR +from ctypes.wintypes import LPWSTR + +from ._compat import _NonClosingTextIOWrapper + +assert sys.platform == "win32" +import msvcrt # noqa: E402 +from ctypes import windll # noqa: E402 +from ctypes import WINFUNCTYPE # noqa: E402 + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetConsoleMode = kernel32.GetConsoleMode +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) +LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + +try: + from ctypes import pythonapi +except ImportError: + # On PyPy we cannot get buffers so our ability to operate here is + # severely limited. + get_buffer = None +else: + + class Py_buffer(Structure): + _fields_ = [ + ("buf", c_void_p), + ("obj", py_object), + ("len", c_ssize_t), + ("itemsize", c_ssize_t), + ("readonly", c_int), + ("ndim", c_int), + ("format", c_char_p), + ("shape", c_ssize_p), + ("strides", c_ssize_p), + ("suboffsets", c_ssize_p), + ("internal", c_void_p), + ] + + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release + + def get_buffer(obj, writable=False): + buf = Py_buffer() + flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + + try: + buffer_type = c_char * buf.len + return buffer_type.from_address(buf.buf) + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + def __init__(self, handle): + self.handle = handle + + def isatty(self): + super().isatty() + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + def readable(self): + return True + + def readinto(self, b): + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError( + "cannot read odd number of bytes from UTF-16-LE encoded console" + ) + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW( + HANDLE(self.handle), + buffer, + code_units_to_be_read, + byref(code_units_read), + None, + ) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError(f"Windows error: {GetLastError()}") + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + def writable(self): + return True + + @staticmethod + def _get_error_message(errno): + if errno == ERROR_SUCCESS: + 
return "ERROR_SUCCESS" + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return "ERROR_NOT_ENOUGH_MEMORY" + return f"Windows error {errno}" + + def write(self, b): + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW( + HANDLE(self.handle), + buf, + code_units_to_be_written, + byref(code_units_written), + None, + ) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream: + def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self) -> str: + return self.buffer.name + + def write(self, x: t.AnyStr) -> int: + if isinstance(x, str): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines: t.Iterable[t.AnyStr]) -> None: + for line in lines: + self.write(line) + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._text_stream, name) + + def isatty(self) -> bool: + return self.buffer.isatty() + + def __repr__(self): + return f"" + + +def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _is_console(f: t.TextIO) -> bool: + if not hasattr(f, "fileno"): + return False + + try: + fileno = f.fileno() + except (OSError, io.UnsupportedOperation): + return False + + handle = msvcrt.get_osfhandle(fileno) + return bool(GetConsoleMode(handle, byref(DWORD()))) + + +def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> t.Optional[t.TextIO]: + if ( + get_buffer is not None + and encoding in {"utf-16-le", None} + and errors in {"strict", None} + and _is_console(f) + ): + func = _stream_factories.get(f.fileno()) + if func is not None: + b = getattr(f, "buffer", None) + + if b is None: + return None + + return func(b) diff --git a/venv/lib/python3.10/site-packages/click/core.py b/venv/lib/python3.10/site-packages/click/core.py new file mode 100644 index 0000000..5abfb0f --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/core.py @@ -0,0 +1,2998 @@ +import enum +import errno +import inspect +import os +import sys +import typing as t +from collections import abc +from contextlib import contextmanager +from contextlib import ExitStack +from functools import partial +from functools import update_wrapper +from gettext import gettext 
as _ +from gettext import ngettext +from itertools import repeat + +from . import types +from .exceptions import Abort +from .exceptions import BadParameter +from .exceptions import ClickException +from .exceptions import Exit +from .exceptions import MissingParameter +from .exceptions import UsageError +from .formatting import HelpFormatter +from .formatting import join_options +from .globals import pop_context +from .globals import push_context +from .parser import _flag_needs_value +from .parser import OptionParser +from .parser import split_opt +from .termui import confirm +from .termui import prompt +from .termui import style +from .utils import _detect_program_name +from .utils import _expand_args +from .utils import echo +from .utils import make_default_short_help +from .utils import make_str +from .utils import PacifyFlushWrapper + +if t.TYPE_CHECKING: + import typing_extensions as te + from .shell_completion import CompletionItem + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +V = t.TypeVar("V") + + +def _complete_visible_commands( + ctx: "Context", incomplete: str +) -> t.Iterator[t.Tuple[str, "Command"]]: + """List all the subcommands of a group that start with the + incomplete value and aren't hidden. + + :param ctx: Invocation context for the group. + :param incomplete: Value being completed. May be empty. + """ + multi = t.cast(MultiCommand, ctx.command) + + for name in multi.list_commands(ctx): + if name.startswith(incomplete): + command = multi.get_command(ctx, name) + + if command is not None and not command.hidden: + yield name, command + + +def _check_multicommand( + base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False +) -> None: + if not base_command.chain or not isinstance(cmd, MultiCommand): + return + if register: + hint = ( + "It is not possible to add multi commands as children to" + " another multi command that is in chain mode." + ) + else: + hint = ( + "Found a multi command as subcommand to a multi command" + " that is in chain mode. This is not supported." + ) + raise RuntimeError( + f"{hint}. Command {base_command.name!r} is set to chain and" + f" {cmd_name!r} was added as a subcommand but it in itself is a" + f" multi command. ({cmd_name!r} is a {type(cmd).__name__}" + f" within a chained {type(base_command).__name__} named" + f" {base_command.name!r})." + ) + + +def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]: + return list(zip(*repeat(iter(iterable), batch_size))) + + +@contextmanager +def augment_usage_errors( + ctx: "Context", param: t.Optional["Parameter"] = None +) -> t.Iterator[None]: + """Context manager that attaches extra information to exceptions.""" + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing( + invocation_order: t.Sequence["Parameter"], + declaration_order: t.Sequence["Parameter"], +) -> t.List["Parameter"]: + """Given a sequence of parameters in the order as should be considered + for processing and an iterable of parameters that exist, this returns + a list in the correct order as they should be processed. 
+ """ + + def sort_key(item: "Parameter") -> t.Tuple[bool, float]: + try: + idx: float = invocation_order.index(item) + except ValueError: + idx = float("inf") + + return not item.is_eager, idx + + return sorted(declaration_order, key=sort_key) + + +class ParameterSource(enum.Enum): + """This is an :class:`~enum.Enum` that indicates the source of a + parameter's value. + + Use :meth:`click.Context.get_parameter_source` to get the + source for a parameter by name. + + .. versionchanged:: 8.0 + Use :class:`~enum.Enum` and drop the ``validate`` method. + + .. versionchanged:: 8.0 + Added the ``PROMPT`` value. + """ + + COMMANDLINE = enum.auto() + """The value was provided by the command line args.""" + ENVIRONMENT = enum.auto() + """The value was provided with an environment variable.""" + DEFAULT = enum.auto() + """Used the default specified by the parameter.""" + DEFAULT_MAP = enum.auto() + """Used a default provided by :attr:`Context.default_map`.""" + PROMPT = enum.auto() + """Used a prompt to confirm a default or provide a value.""" + + +class Context: + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + :param command: the command class for this context. + :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. + :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. + :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. 
+    :param help_option_names: optionally a list of strings that define how
+                              the default help parameter is named. The
+                              default is ``['--help']``.
+    :param token_normalize_func: an optional function that is used to
+                                 normalize tokens (options, choices,
+                                 etc.). This for instance can be used to
+                                 implement case insensitive behavior.
+    :param color: controls if the terminal supports ANSI colors or not. The
+                  default is autodetection. This is only needed if ANSI
+                  codes are used in texts that Click prints which is by
+                  default not the case. This for instance would affect
+                  help output.
+    :param show_default: Show the default value for commands. If this
+        value is not set, it defaults to the value from the parent
+        context. ``Command.show_default`` overrides this default for the
+        specific command.
+
+    .. versionchanged:: 8.1
+        The ``show_default`` parameter is overridden by
+        ``Command.show_default``, instead of the other way around.
+
+    .. versionchanged:: 8.0
+        The ``show_default`` parameter defaults to the value from the
+        parent context.
+
+    .. versionchanged:: 7.1
+        Added the ``show_default`` parameter.
+
+    .. versionchanged:: 4.0
+        Added the ``color``, ``ignore_unknown_options``, and
+        ``max_content_width`` parameters.
+
+    .. versionchanged:: 3.0
+        Added the ``allow_extra_args`` and ``allow_interspersed_args``
+        parameters.
+
+    .. versionchanged:: 2.0
+        Added the ``resilient_parsing``, ``help_option_names``, and
+        ``token_normalize_func`` parameters.
+    """
+
+    #: The formatter class to create with :meth:`make_formatter`.
+    #:
+    #: .. versionadded:: 8.0
+    formatter_class: t.Type["HelpFormatter"] = HelpFormatter
+
+    def __init__(
+        self,
+        command: "Command",
+        parent: t.Optional["Context"] = None,
+        info_name: t.Optional[str] = None,
+        obj: t.Optional[t.Any] = None,
+        auto_envvar_prefix: t.Optional[str] = None,
+        default_map: t.Optional[t.Dict[str, t.Any]] = None,
+        terminal_width: t.Optional[int] = None,
+        max_content_width: t.Optional[int] = None,
+        resilient_parsing: bool = False,
+        allow_extra_args: t.Optional[bool] = None,
+        allow_interspersed_args: t.Optional[bool] = None,
+        ignore_unknown_options: t.Optional[bool] = None,
+        help_option_names: t.Optional[t.List[str]] = None,
+        token_normalize_func: t.Optional[t.Callable[[str], str]] = None,
+        color: t.Optional[bool] = None,
+        show_default: t.Optional[bool] = None,
+    ) -> None:
+        #: the parent context or `None` if none exists.
+        self.parent = parent
+        #: the :class:`Command` for this context.
+        self.command = command
+        #: the descriptive information name
+        self.info_name = info_name
+        #: Map of parameter names to their parsed values. Parameters
+        #: with ``expose_value=False`` are not stored.
+        self.params: t.Dict[str, t.Any] = {}
+        #: the leftover arguments.
+        self.args: t.List[str] = []
+        #: protected arguments. These are arguments that are prepended
+        #: to `args` when certain parsing scenarios are encountered but
+        #: must never be propagated to other arguments. This is used
+        #: to implement nested parsing.
+        self.protected_args: t.List[str] = []
+        #: the collected prefixes of the command's options.
+        self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set()
+
+        if obj is None and parent is not None:
+            obj = parent.obj
+
+        #: the user object stored.
+        self.obj: t.Any = obj
+        self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {})
+
+        #: A dictionary (-like object) with defaults for parameters.
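+        # A sketch of the lookup below (hypothetical values): if the
+        # parent context carries default_map={"sync": {"retries": 3}}
+        # and this context's info_name is "sync", this context ends up
+        # with default_map={"retries": 3}.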
+        if (
+            default_map is None
+            and info_name is not None
+            and parent is not None
+            and parent.default_map is not None
+        ):
+            default_map = parent.default_map.get(info_name)
+
+        self.default_map: t.Optional[t.Dict[str, t.Any]] = default_map
+
+        #: This flag indicates if a subcommand is going to be executed. A
+        #: group callback can use this information to figure out if it's
+        #: being executed directly or because the execution flow passes
+        #: onwards to a subcommand. By default it's None, but it can be
+        #: the name of the subcommand to execute.
+        #:
+        #: If chaining is enabled this will be set to ``'*'`` in case
+        #: any commands are executed. It is however not possible to
+        #: figure out which ones. If you require this knowledge you
+        #: should use a :func:`result_callback`.
+        self.invoked_subcommand: t.Optional[str] = None
+
+        if terminal_width is None and parent is not None:
+            terminal_width = parent.terminal_width
+
+        #: The width of the terminal (None is autodetection).
+        self.terminal_width: t.Optional[int] = terminal_width
+
+        if max_content_width is None and parent is not None:
+            max_content_width = parent.max_content_width
+
+        #: The maximum width of formatted content (None implies a sensible
+        #: default which is 80 for most things).
+        self.max_content_width: t.Optional[int] = max_content_width
+
+        if allow_extra_args is None:
+            allow_extra_args = command.allow_extra_args
+
+        #: Indicates if the context allows extra args or if it should
+        #: fail on parsing.
+        #:
+        #: .. versionadded:: 3.0
+        self.allow_extra_args = allow_extra_args
+
+        if allow_interspersed_args is None:
+            allow_interspersed_args = command.allow_interspersed_args
+
+        #: Indicates if the context allows mixing of arguments and
+        #: options or not.
+        #:
+        #: .. versionadded:: 3.0
+        self.allow_interspersed_args: bool = allow_interspersed_args
+
+        if ignore_unknown_options is None:
+            ignore_unknown_options = command.ignore_unknown_options
+
+        #: Instructs click to ignore options that a command does not
+        #: understand and store them on the context for later
+        #: processing. This is primarily useful for situations where you
+        #: want to call into external programs. Generally this pattern is
+        #: strongly discouraged because it's not possible to losslessly
+        #: forward all arguments.
+        #:
+        #: .. versionadded:: 4.0
+        self.ignore_unknown_options: bool = ignore_unknown_options
+
+        if help_option_names is None:
+            if parent is not None:
+                help_option_names = parent.help_option_names
+            else:
+                help_option_names = ["--help"]
+
+        #: The names for the help options.
+        self.help_option_names: t.List[str] = help_option_names
+
+        if token_normalize_func is None and parent is not None:
+            token_normalize_func = parent.token_normalize_func
+
+        #: An optional normalization function for tokens. This is
+        #: options, choices, commands etc.
+        self.token_normalize_func: t.Optional[
+            t.Callable[[str], str]
+        ] = token_normalize_func
+
+        #: Indicates if resilient parsing is enabled. In that case Click
+        #: will do its best to not cause any failures and default values
+        #: will be ignored. Useful for completion.
+        self.resilient_parsing: bool = resilient_parsing
+
+        # If there is no envvar prefix yet, but the parent has one and
+        # the command on this level has a name, we can expand the envvar
+        # prefix automatically.
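+        # For example (hypothetical names): a parent prefix "CLI" and
+        # info_name "sync" expand to "CLI_SYNC", so an option named
+        # "--retries" would read its value from CLI_SYNC_RETRIES.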
+ if auto_envvar_prefix is None: + if ( + parent is not None + and parent.auto_envvar_prefix is not None + and self.info_name is not None + ): + auto_envvar_prefix = ( + f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" + ) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + + if auto_envvar_prefix is not None: + auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") + + self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. + self.color: t.Optional[bool] = color + + if show_default is None and parent is not None: + show_default = parent.show_default + + #: Show option default values when formatting help text. + self.show_default: t.Optional[bool] = show_default + + self._close_callbacks: t.List[t.Callable[[], t.Any]] = [] + self._depth = 0 + self._parameter_source: t.Dict[str, ParameterSource] = {} + self._exit_stack = ExitStack() + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire CLI + structure. + + .. code-block:: python + + with Context(cli) as ctx: + info = ctx.to_info_dict() + + .. versionadded:: 8.0 + """ + return { + "command": self.command.to_info_dict(self), + "info_name": self.info_name, + "allow_extra_args": self.allow_extra_args, + "allow_interspersed_args": self.allow_interspersed_args, + "ignore_unknown_options": self.ignore_unknown_options, + "auto_envvar_prefix": self.auto_envvar_prefix, + } + + def __enter__(self) -> "Context": + self._depth += 1 + push_context(self) + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self._depth -= 1 + if self._depth == 0: + self.close() + pop_context() + + @contextmanager + def scope(self, cleanup: bool = True) -> t.Iterator["Context"]: + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self) -> t.Dict[str, t.Any]: + """This is a dictionary which is shared with all the contexts + that are nested. It exists so that click utilities can store some + state here if they need to. It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. However what is important is + that code that places data here adheres to the general semantics of + the system. 
+ + Example usage:: + + LANG_KEY = f'{__name__}.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self) -> HelpFormatter: + """Creates the :class:`~click.HelpFormatter` for the help and + usage output. + + To quickly customize the formatter class used without overriding + this method, set the :attr:`formatter_class` attribute. + + .. versionchanged:: 8.0 + Added the :attr:`formatter_class` attribute. + """ + return self.formatter_class( + width=self.terminal_width, max_width=self.max_content_width + ) + + def with_resource(self, context_manager: t.ContextManager[V]) -> V: + """Register a resource as if it were used in a ``with`` + statement. The resource will be cleaned up when the context is + popped. + + Uses :meth:`contextlib.ExitStack.enter_context`. It calls the + resource's ``__enter__()`` method and returns the result. When + the context is popped, it closes the stack, which calls the + resource's ``__exit__()`` method. + + To register a cleanup function for something that isn't a + context manager, use :meth:`call_on_close`. Or use something + from :mod:`contextlib` to turn it into a context manager first. + + .. code-block:: python + + @click.group() + @click.option("--name") + @click.pass_context + def cli(ctx): + ctx.obj = ctx.with_resource(connect_db(name)) + + :param context_manager: The context manager to enter. + :return: Whatever ``context_manager.__enter__()`` returns. + + .. versionadded:: 8.0 + """ + return self._exit_stack.enter_context(context_manager) + + def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: + """Register a function to be called when the context tears down. + + This can be used to close resources opened during the script + execution. Resources that support Python's context manager + protocol which would be used in a ``with`` statement should be + registered with :meth:`with_resource` instead. + + :param f: The function to execute on teardown. + """ + return self._exit_stack.callback(f) + + def close(self) -> None: + """Invoke all close callbacks registered with + :meth:`call_on_close`, and exit all context managers entered + with :meth:`with_resource`. + """ + self._exit_stack.close() + # In case the context is reused, create a new exit stack. + self._exit_stack = ExitStack() + + @property + def command_path(self) -> str: + """The computed command path. This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. 
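+
+        For example (a sketch, assuming a group named ``cli`` with a
+        subcommand ``sync``), the context created for ``cli sync``
+        reports ``"cli sync"`` as its ``command_path``.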
+ """ + rv = "" + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + parent_command_path = [self.parent.command_path] + + if isinstance(self.parent.command, Command): + for param in self.parent.command.get_params(self): + parent_command_path.extend(param.get_usage_pieces(self)) + + rv = f"{' '.join(parent_command_path)} {rv}" + return rv.lstrip() + + def find_root(self) -> "Context": + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type: t.Type[V]) -> t.Optional[V]: + """Finds the closest object of a given type.""" + node: t.Optional["Context"] = self + + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + + node = node.parent + + return None + + def ensure_object(self, object_type: t.Type[V]) -> V: + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[False]" = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]: + """Get the default for a parameter from :attr:`default_map`. + + :param name: Name of the parameter. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. + """ + if self.default_map is not None: + value = self.default_map.get(name) + + if call and callable(value): + return value() + + return value + + return None + + def fail(self, message: str) -> "te.NoReturn": + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self) -> "te.NoReturn": + """Aborts the script.""" + raise Abort() + + def exit(self, code: int = 0) -> "te.NoReturn": + """Exits the application with a given exit code.""" + raise Exit(code) + + def get_usage(self) -> str: + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self) -> str: + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def _make_sub_context(self, command: "Command") -> "Context": + """Create a new context of the same type as this context, but + for a new command. + + :meta private: + """ + return type(self)(command, info_name=command.name, parent=self) + + def invoke( + __self, # noqa: B902 + __callback: t.Union["Command", t.Callable[..., t.Any]], + *args: t.Any, + **kwargs: t.Any, + ) -> t.Any: + """Invokes a command callback in exactly the way it expects. There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. 
+ + Note that before Click 3.2 keyword arguments were not properly filled + in against the intention of this code and no context was created. For + more information about this change and why it was done in a bugfix + release see :ref:`upgrade-to-3.2`. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if :meth:`forward` is called at multiple levels. + """ + if isinstance(__callback, Command): + other_cmd = __callback + + if other_cmd.callback is None: + raise TypeError( + "The given command does not have a callback that can be invoked." + ) + else: + __callback = other_cmd.callback + + ctx = __self._make_sub_context(other_cmd) + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + kwargs[param.name] = param.type_cast_value( # type: ignore + ctx, param.get_default(ctx) + ) + + # Track all kwargs as params, so that forward() will pass + # them on in subsequent calls. + ctx.params.update(kwargs) + else: + ctx = __self + + with augment_usage_errors(__self): + with ctx: + return __callback(*args, **kwargs) + + def forward( + __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902 + ) -> t.Any: + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if ``forward`` is called at multiple levels. + """ + # Can only forward to other commands, not direct callbacks. + if not isinstance(__cmd, Command): + raise TypeError("Callback is not a command.") + + for param in __self.params: + if param not in kwargs: + kwargs[param] = __self.params[param] + + return __self.invoke(__cmd, *args, **kwargs) + + def set_parameter_source(self, name: str, source: ParameterSource) -> None: + """Set the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + :param name: The name of the parameter. + :param source: A member of :class:`~click.core.ParameterSource`. + """ + self._parameter_source[name] = source + + def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]: + """Get the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + This can be useful for determining when a user specified a value + on the command line that is the same as the default value. It + will be :attr:`~click.core.ParameterSource.DEFAULT` only if the + value was actually taken from the default. + + :param name: The name of the parameter. + :rtype: ParameterSource + + .. versionchanged:: 8.0 + Returns ``None`` if the parameter was not provided from any + source. + """ + return self._parameter_source.get(name) + + +class BaseCommand: + """The base command implements the minimal API contract of commands. + Most code will never use this as it does not implement a lot of useful + functionality but it can act as the direct subclass of alternative + parsing methods that do not depend on the Click parser. + + For instance, this can be used to bridge Click and other systems like + argparse or docopt. + + Because base commands do not implement a lot of the API that other + parts of Click take for granted, they are not supported for all + operations. For instance, they cannot be used with the decorators + usually and they have no built-in callback system. + + .. 
versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + """ + + #: The context class to create with :meth:`make_context`. + #: + #: .. versionadded:: 8.0 + context_class: t.Type[Context] = Context + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.Dict[str, t.Any]] = None, + ) -> None: + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + + if context_settings is None: + context_settings = {} + + #: an optional dictionary with defaults passed to the context. + self.context_settings: t.Dict[str, t.Any] = context_settings + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire structure + below this command. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + :param ctx: A :class:`Context` representing this command. + + .. versionadded:: 8.0 + """ + return {"name": self.name} + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def get_usage(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get usage") + + def get_help(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get help") + + def make_context( + self, + info_name: t.Optional[str], + args: t.List[str], + parent: t.Optional[Context] = None, + **extra: t.Any, + ) -> Context: + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. + + To quickly customize the context class used without overriding + this method, set the :attr:`context_class` attribute. + + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it it's + the name of the command. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + + .. versionchanged:: 8.0 + Added the :attr:`context_class` attribute. + """ + for key, value in self.context_settings.items(): + if key not in extra: + extra[key] = value + + ctx = self.context_class( + self, info_name=info_name, parent=parent, **extra # type: ignore + ) + + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + """Given a context and a list of arguments this creates the parser + and parses the arguments, then modifies the context as necessary. + This is automatically invoked by :meth:`make_context`. 
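+
+        For example (a sketch; ``cli`` is a hypothetical
+        :class:`Command`), the parsed values end up on the context::
+
+            with cli.make_context("cli", ["--name", "x"]) as ctx:
+                print(ctx.params)  # {"name": "x"}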
+ """ + raise NotImplementedError("Base commands do not know how to parse arguments.") + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the command. The default + implementation is raising a not implemented error. + """ + raise NotImplementedError("Base commands are not invokable by default") + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of chained multi-commands. + + Any command could be part of a chained multi-command, so sibling + commands are valid at any point during command completion. Other + command classes will return more completions. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + while ctx.parent is not None: + ctx = ctx.parent + + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + results.extend( + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + if name not in ctx.protected_args + ) + + return results + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: "te.Literal[True]" = True, + **extra: t.Any, + ) -> "te.NoReturn": + ... + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = ..., + **extra: t.Any, + ) -> t.Any: + ... + + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + **extra: t.Any, + ) -> t.Any: + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"__COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param windows_expand_args: Expand glob patterns, user dir, and + env vars in command line args on Windows. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. + + .. versionchanged:: 8.0.1 + Added the ``windows_expand_args`` parameter to allow + disabling command line arg expansion on Windows. + + .. 
versionchanged:: 8.0 + When taking arguments from ``sys.argv`` on Windows, glob + patterns, user dir, and env vars are expanded. + + .. versionchanged:: 3.0 + Added the ``standalone_mode`` parameter. + """ + if args is None: + args = sys.argv[1:] + + if os.name == "nt" and windows_expand_args: + args = _expand_args(args) + else: + args = list(args) + + if prog_name is None: + prog_name = _detect_program_name() + + # Process shell completion requests and exit early. + self._main_shell_completion(extra, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! + # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt): + echo(file=sys.stderr) + raise Abort() from None + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except OSError as e: + if e.errno == errno.EPIPE: + sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) + sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo(_("Aborted!"), file=sys.stderr) + sys.exit(1) + + def _main_shell_completion( + self, + ctx_args: t.Dict[str, t.Any], + prog_name: str, + complete_var: t.Optional[str] = None, + ) -> None: + """Check if the shell is asking for tab completion, process + that, then exit early. Called from :meth:`main` before the + program is invoked. + + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. Defaults to + ``_{PROG_NAME}_COMPLETE``. + """ + if complete_var is None: + complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper() + + instruction = os.environ.get(complete_var) + + if not instruction: + return + + from .shell_completion import shell_complete + + rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) + sys.exit(rv) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class Command(BaseCommand): + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. 
This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is disabled by default. + If enabled this will add ``--help`` as argument + if no arguments are passed + :param hidden: hide this command from help outputs. + + :param deprecated: issues a message indicating that + the command is deprecated. + + .. versionchanged:: 8.1 + ``help``, ``epilog``, and ``short_help`` are stored unprocessed, + all formatting is done when outputting help text, not at init, + and is done even if not using the ``@command`` decorator. + + .. versionchanged:: 8.0 + Added a ``repr`` showing the command name. + + .. versionchanged:: 7.1 + Added the ``no_args_is_help`` parameter. + + .. versionchanged:: 2.0 + Added the ``context_settings`` parameter. + """ + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.Dict[str, t.Any]] = None, + callback: t.Optional[t.Callable[..., t.Any]] = None, + params: t.Optional[t.List["Parameter"]] = None, + help: t.Optional[str] = None, + epilog: t.Optional[str] = None, + short_help: t.Optional[str] = None, + options_metavar: t.Optional[str] = "[OPTIONS]", + add_help_option: bool = True, + no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool = False, + ) -> None: + super().__init__(name, context_settings) + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. + self.params: t.List["Parameter"] = params or [] + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self.no_args_is_help = no_args_is_help + self.hidden = hidden + self.deprecated = deprecated + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + info_dict.update( + params=[param.to_info_dict() for param in self.get_params(ctx)], + help=self.help, + epilog=self.epilog, + short_help=self.short_help, + hidden=self.hidden, + deprecated=self.deprecated, + ) + return info_dict + + def get_usage(self, ctx: Context) -> str: + """Formats the usage line into a string and returns it. + + Calls :meth:`format_usage` internally. + """ + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_params(self, ctx: Context) -> t.List["Parameter"]: + rv = self.params + help_option = self.get_help_option(ctx) + + if help_option is not None: + rv = [*rv, help_option] + + return rv + + def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the usage line into the formatter. + + This is a low-level method called by :meth:`get_usage`. 
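+
+        For example (a sketch; ``cli`` is a hypothetical command with
+        no arguments)::
+
+            with cli.make_context("cli", []) as ctx:
+                print(cli.get_usage(ctx))  # Usage: cli [OPTIONS]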
+ """ + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, " ".join(pieces)) + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + """Returns all the pieces that go into the usage line and returns + it as a list of strings. + """ + rv = [self.options_metavar] if self.options_metavar else [] + + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + + return rv + + def get_help_option_names(self, ctx: Context) -> t.List[str]: + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return list(all_names) + + def get_help_option(self, ctx: Context) -> t.Optional["Option"]: + """Returns the help option object.""" + help_options = self.get_help_option_names(ctx) + + if not help_options or not self.add_help_option: + return None + + def show_help(ctx: Context, param: "Parameter", value: str) -> None: + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + return Option( + help_options, + is_flag=True, + is_eager=True, + expose_value=False, + callback=show_help, + help=_("Show this message and exit."), + ) + + def make_parser(self, ctx: Context) -> OptionParser: + """Creates the underlying option parser for this command.""" + parser = OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx: Context) -> str: + """Formats the help into a string and returns it. + + Calls :meth:`format_help` internally. + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_short_help_str(self, limit: int = 45) -> str: + """Gets short help for the command or makes it by shortening the + long help string. + """ + if self.short_help: + text = inspect.cleandoc(self.short_help) + elif self.help: + text = make_default_short_help(self.help, limit) + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + return text.strip() + + def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help into the formatter if it exists. + + This is a low-level method called by :meth:`get_help`. 
+ + This calls the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help text to the formatter if it exists.""" + text = self.help if self.help is not None else "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + if text: + text = inspect.cleandoc(text).partition("\f")[0] + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(text) + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section(_("Options")): + formatter.write_dl(opts) + + def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + epilog = inspect.cleandoc(self.epilog) + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(epilog) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing(param_order, self.get_params(ctx)): + value, args = param.handle_parse_result(ctx, opts, args) + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail( + ngettext( + "Got unexpected extra argument ({args})", + "Got unexpected extra arguments ({args})", + len(args), + ).format(args=" ".join(map(str, args))) + ) + + ctx.args = args + ctx._opt_prefixes.update(parser._opt_prefixes) + return args + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + if self.deprecated: + message = _( + "DeprecationWarning: The command {name!r} is deprecated." + ).format(name=self.name) + echo(style(message, fg="red"), err=True) + + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options and chained multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + if incomplete and not incomplete[0].isalnum(): + for param in self.get_params(ctx): + if ( + not isinstance(param, Option) + or param.hidden + or ( + not param.multiple + and ctx.get_parameter_source(param.name) # type: ignore + is ParameterSource.COMMANDLINE + ) + ): + continue + + results.extend( + CompletionItem(name, help=param.help) + for name in [*param.opts, *param.secondary_opts] + if name.startswith(incomplete) + ) + + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class MultiCommand(Command): + """A multi command is the basic implementation of a command that + dispatches to subcommands. The most common version is the + :class:`Group`. + + :param invoke_without_command: this controls how the multi command itself + is invoked. By default it's only invoked + if a subcommand is provided. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is enabled by default if + `invoke_without_command` is disabled or disabled + if it's enabled. If enabled this will add + ``--help`` as argument if no arguments are + passed. + :param subcommand_metavar: the string that is used in the documentation + to indicate the subcommand place. + :param chain: if this is set to `True` chaining of multiple subcommands + is enabled. This restricts the form of commands in that + they cannot have optional arguments but it allows + multiple commands to be chained together. + :param result_callback: The result callback to attach to this multi + command. This can be set or changed later with the + :meth:`result_callback` decorator. + """ + + allow_extra_args = True + allow_interspersed_args = False + + def __init__( + self, + name: t.Optional[str] = None, + invoke_without_command: bool = False, + no_args_is_help: t.Optional[bool] = None, + subcommand_metavar: t.Optional[str] = None, + chain: bool = False, + result_callback: t.Optional[t.Callable[..., t.Any]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + + if subcommand_metavar is None: + if chain: + subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." + else: + subcommand_metavar = "COMMAND [ARGS]..." + + self.subcommand_metavar = subcommand_metavar + self.chain = chain + # The result callback that is stored. This can be set or + # overridden with the :func:`result_callback` decorator. + self._result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError( + "Multi commands in chain mode cannot have" + " optional arguments." 
+                    )
+
+    def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict(ctx)
+        commands = {}
+
+        for name in self.list_commands(ctx):
+            command = self.get_command(ctx, name)
+
+            if command is None:
+                continue
+
+            sub_ctx = ctx._make_sub_context(command)
+
+            with sub_ctx.scope(cleanup=False):
+                commands[name] = command.to_info_dict(sub_ctx)
+
+        info_dict.update(commands=commands, chain=self.chain)
+        return info_dict
+
+    def collect_usage_pieces(self, ctx: Context) -> t.List[str]:
+        rv = super().collect_usage_pieces(ctx)
+        rv.append(self.subcommand_metavar)
+        return rv
+
+    def format_options(self, ctx: Context, formatter: HelpFormatter) -> None:
+        super().format_options(ctx, formatter)
+        self.format_commands(ctx, formatter)
+
+    def result_callback(self, replace: bool = False) -> t.Callable[[F], F]:
+        """Adds a result callback to the command. By default if a
+        result callback is already registered this will chain them but
+        this can be disabled with the `replace` parameter. The result
+        callback is invoked with the return value of the subcommand
+        (or the list of return values from all subcommands if chaining
+        is enabled) as well as the parameters as they would be passed
+        to the main callback.
+
+        Example::
+
+            @click.group()
+            @click.option('-i', '--input', default=23)
+            def cli(input):
+                return 42
+
+            @cli.result_callback()
+            def process_result(result, input):
+                return result + input
+
+        :param replace: if set to `True` an already existing result
+                        callback will be removed.
+
+        .. versionchanged:: 8.0
+            Renamed from ``resultcallback``.
+
+        .. versionadded:: 3.0
+        """
+
+        def decorator(f: F) -> F:
+            old_callback = self._result_callback
+
+            if old_callback is None or replace:
+                self._result_callback = f
+                return f
+
+            def function(__value, *args, **kwargs):  # type: ignore
+                inner = old_callback(__value, *args, **kwargs)  # type: ignore
+                return f(inner, *args, **kwargs)
+
+            self._result_callback = rv = update_wrapper(t.cast(F, function), f)
+            return rv
+
+        return decorator
+
+    def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None:
+        """Extra format method for multi commands that adds all the commands
+        after the options.
+        """
+        commands = []
+        for subcommand in self.list_commands(ctx):
+            cmd = self.get_command(ctx, subcommand)
+            # What is this, the tool lied about a command.
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section(_("Commands")): + formatter.write_dl(rows) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + rest = super().parse_args(ctx, args) + + if self.chain: + ctx.protected_args = rest + ctx.args = [] + elif rest: + ctx.protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx: Context) -> t.Any: + def _process_result(value: t.Any) -> t.Any: + if self._result_callback is not None: + value = ctx.invoke(self._result_callback, value, **ctx.params) + return value + + if not ctx.protected_args: + if self.invoke_without_command: + # No subcommand was invoked, so the result callback is + # invoked with the group return value for regular + # groups, or an empty list for chained groups. + with ctx: + rv = super().invoke(ctx) + return _process_result([] if self.chain else rv) + ctx.fail(_("Missing command.")) + + # Fetch args back out + args = [*ctx.protected_args, *ctx.args] + ctx.args = [] + ctx.protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + ctx.invoked_subcommand = cmd_name + super().invoke(ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = "*" if args else None + super().invoke(ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + ) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command( + self, ctx: Context, args: t.List[str] + ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]: + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
+        if cmd is None and ctx.token_normalize_func is not None:
+            cmd_name = ctx.token_normalize_func(cmd_name)
+            cmd = self.get_command(ctx, cmd_name)
+
+        # If we don't find the command we want to show an error message
+        # to the user that it was not provided. However, there is
+        # something else we should do: if the first argument looks like
+        # an option we want to kick off parsing again for arguments to
+        # resolve things like --help which now should go to the main
+        # place.
+        if cmd is None and not ctx.resilient_parsing:
+            if split_opt(cmd_name)[0]:
+                self.parse_args(ctx, ctx.args)
+            ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name))
+        return cmd_name if cmd else None, cmd, args[1:]
+
+    def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]:
+        """Given a context and a command name, this returns a
+        :class:`Command` object if it exists or returns `None`.
+        """
+        raise NotImplementedError
+
+    def list_commands(self, ctx: Context) -> t.List[str]:
+        """Returns a list of subcommand names in the order they should
+        appear.
+        """
+        return []
+
+    def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]:
+        """Return a list of completions for the incomplete value. Looks
+        at the names of options, subcommands, and chained
+        multi-commands.
+
+        :param ctx: Invocation context for this command.
+        :param incomplete: Value being completed. May be empty.
+
+        .. versionadded:: 8.0
+        """
+        from click.shell_completion import CompletionItem
+
+        results = [
+            CompletionItem(name, help=command.get_short_help_str())
+            for name, command in _complete_visible_commands(ctx, incomplete)
+        ]
+        results.extend(super().shell_complete(ctx, incomplete))
+        return results
+
+
+class Group(MultiCommand):
+    """A group allows a command to have subcommands attached. This is
+    the most common way to implement nesting in Click.
+
+    :param name: The name of the group command.
+    :param commands: A dict mapping names to :class:`Command` objects.
+        Can also be a list of :class:`Command`, which will use
+        :attr:`Command.name` to create the dict.
+    :param attrs: Other command arguments described in
+        :class:`MultiCommand`, :class:`Command`, and
+        :class:`BaseCommand`.
+
+    .. versionchanged:: 8.0
+        The ``commands`` argument can be a list of command objects.
+    """
+
+    #: If set, this is used by the group's :meth:`command` decorator
+    #: as the default :class:`Command` class. This is useful to make all
+    #: subcommands use a custom command class.
+    #:
+    #: .. versionadded:: 8.0
+    command_class: t.Optional[t.Type[Command]] = None
+
+    #: If set, this is used by the group's :meth:`group` decorator
+    #: as the default :class:`Group` class. This is useful to make all
+    #: subgroups use a custom group class.
+    #:
+    #: If set to the special value :class:`type` (literally
+    #: ``group_class = type``), this group's class will be used as the
+    #: default class. This makes a custom group class continue to make
+    #: custom groups.
+    #:
+    #: ..
versionadded:: 8.0 + group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None + # Literal[type] isn't valid, so use Type[type] + + def __init__( + self, + name: t.Optional[str] = None, + commands: t.Optional[t.Union[t.Dict[str, Command], t.Sequence[Command]]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if commands is None: + commands = {} + elif isinstance(commands, abc.Sequence): + commands = {c.name: c for c in commands if c.name is not None} + + #: The registered subcommands by their exported names. + self.commands: t.Dict[str, Command] = commands + + def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None: + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError("Command has no name.") + _check_multicommand(self, name, cmd, register=True) + self.commands[name] = cmd + + @t.overload + def command(self, __func: t.Callable[..., t.Any]) -> Command: + ... + + @t.overload + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command]: + ... + + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]: + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` and + immediately registers the created command with this group by + calling :meth:`add_command`. + + To customize the command class used, set the + :attr:`command_class` attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`command_class` attribute. + """ + from .decorators import command + + if self.command_class and kwargs.get("cls") is None: + kwargs["cls"] = self.command_class + + func: t.Optional[t.Callable] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'command(**kwargs)(callable)' to provide arguments." + (func,) = args + args = () + + def decorator(f: t.Callable[..., t.Any]) -> Command: + cmd: Command = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + @t.overload + def group(self, __func: t.Callable[..., t.Any]) -> "Group": + ... + + @t.overload + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]: + ... + + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]: + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` and + immediately registers the created group with this group by + calling :meth:`add_command`. + + To customize the group class used, set the :attr:`group_class` + attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`group_class` attribute. + """ + from .decorators import group + + func: t.Optional[t.Callable] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'group(**kwargs)(callable)' to provide arguments." 
+            (func,) = args
+            args = ()
+
+        if self.group_class is not None and kwargs.get("cls") is None:
+            if self.group_class is type:
+                kwargs["cls"] = type(self)
+            else:
+                kwargs["cls"] = self.group_class
+
+        def decorator(f: t.Callable[..., t.Any]) -> "Group":
+            cmd: Group = group(*args, **kwargs)(f)
+            self.add_command(cmd)
+            return cmd
+
+        if func is not None:
+            return decorator(func)
+
+        return decorator
+
+    def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]:
+        return self.commands.get(cmd_name)
+
+    def list_commands(self, ctx: Context) -> t.List[str]:
+        return sorted(self.commands)
+
+
+class CommandCollection(MultiCommand):
+    """A command collection is a multi command that merges multiple multi
+    commands together into one. This is a straightforward implementation
+    that accepts a list of different multi commands as sources and
+    provides all the commands for each of them.
+    """
+
+    def __init__(
+        self,
+        name: t.Optional[str] = None,
+        sources: t.Optional[t.List[MultiCommand]] = None,
+        **attrs: t.Any,
+    ) -> None:
+        super().__init__(name, **attrs)
+        #: The list of registered multi commands.
+        self.sources: t.List[MultiCommand] = sources or []
+
+    def add_source(self, multi_cmd: MultiCommand) -> None:
+        """Adds a new multi command to the chain dispatcher."""
+        self.sources.append(multi_cmd)
+
+    def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]:
+        for source in self.sources:
+            rv = source.get_command(ctx, cmd_name)
+
+            if rv is not None:
+                if self.chain:
+                    _check_multicommand(self, cmd_name, rv)
+
+                return rv
+
+        return None
+
+    def list_commands(self, ctx: Context) -> t.List[str]:
+        rv: t.Set[str] = set()
+
+        for source in self.sources:
+            rv.update(source.list_commands(ctx))
+
+        return sorted(rv)
+
+
+def _check_iter(value: t.Any) -> t.Iterator[t.Any]:
+    """Check if the value is iterable but not a string. Raises a
+    :exc:`TypeError`, or returns an iterator over the value.
+    """
+    if isinstance(value, str):
+        raise TypeError
+
+    return iter(value)
+
+
+class Parameter:
+    r"""A parameter to a command comes in two versions: they are either
+    :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently
+    not supported by design as some of the internals for parsing are
+    intentionally not finalized.
+
+    Some settings are supported by both options and arguments.
+
+    :param param_decls: the parameter declarations for this option or
+                        argument. This is a list of flags or argument
+                        names.
+    :param type: the type that should be used. Either a :class:`ParamType`
+                 or a Python type. The latter is converted into the former
+                 automatically if supported.
+    :param required: controls if this is optional or not.
+    :param default: the default value if omitted. This can also be a callable,
+                    in which case it's invoked when the default is needed
+                    without any arguments.
+    :param callback: A function to further process or validate the value
+        after type conversion. It is called as ``f(ctx, param, value)``
+        and must return the value. It is called for all sources,
+        including prompts.
+    :param nargs: the number of arguments to match. If not ``1`` the return
+                  value is a tuple instead of single value. The default for
+                  nargs is ``1`` (except if the type is a tuple, then it's
+                  the arity of the tuple). If ``nargs=-1``, all remaining
+                  parameters are collected.
+    :param metavar: how the value is represented in the help page.
+ :param expose_value: if this is `True` then the value is passed onwards + to the command callback and stored on the context, + otherwise it's skipped. + :param is_eager: eager values are processed before non eager ones. This + should not be set for arguments or it will inverse the + order of processing. + :param envvar: a string or list of strings that are environment variables + that should be checked. + :param shell_complete: A function that returns custom shell + completions. Used instead of the param's type completion if + given. Takes ``ctx, param, incomplete`` and must return a list + of :class:`~click.shell_completion.CompletionItem` or a list of + strings. + + .. versionchanged:: 8.0 + ``process_value`` validates required parameters and bounded + ``nargs``, and invokes the parameter callback before returning + the value. This allows the callback to validate prompts. + ``full_process_value`` is removed. + + .. versionchanged:: 8.0 + ``autocompletion`` is renamed to ``shell_complete`` and has new + semantics described above. The old name is deprecated and will + be removed in 8.1, until then it will be wrapped to match the + new requirements. + + .. versionchanged:: 8.0 + For ``multiple=True, nargs>1``, the default must be a list of + tuples. + + .. versionchanged:: 8.0 + Setting a default is no longer required for ``nargs>1``, it will + default to ``None``. ``multiple=True`` or ``nargs=-1`` will + default to ``()``. + + .. versionchanged:: 7.1 + Empty environment variables are ignored rather than taking the + empty string value. This makes it possible for scripts to clear + variables if they can't unset them. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. The old callback format will still work, but it will + raise a warning to give you a chance to migrate the code easier. + """ + + param_type_name = "parameter" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + required: bool = False, + default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None, + callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None, + nargs: t.Optional[int] = None, + multiple: bool = False, + metavar: t.Optional[str] = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None, + shell_complete: t.Optional[ + t.Callable[ + [Context, "Parameter", str], + t.Union[t.List["CompletionItem"], t.List[str]], + ] + ] = None, + ) -> None: + self.name, self.opts, self.secondary_opts = self._parse_decls( + param_decls or (), expose_value + ) + self.type = types.convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. + if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = multiple + self.expose_value = expose_value + self.default = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + self._custom_shell_complete = shell_complete + + if __debug__: + if self.type.is_composite and nargs != self.type.arity: + raise ValueError( + f"'nargs' must be {self.type.arity} (or None) for" + f" type {self.type!r}, but it was {nargs}." + ) + + # Skip no default or callable default. 
+ check_default = default if not callable(default) else None + + if check_default is not None: + if multiple: + try: + # Only check the first value against nargs. + check_default = next(_check_iter(check_default), None) + except TypeError: + raise ValueError( + "'default' must be a list when 'multiple' is true." + ) from None + + # Can be None for multiple with empty default. + if nargs != 1 and check_default is not None: + try: + _check_iter(check_default) + except TypeError: + if multiple: + message = ( + "'default' must be a list of lists when 'multiple' is" + " true and 'nargs' != 1." + ) + else: + message = "'default' must be a list when 'nargs' != 1." + + raise ValueError(message) from None + + if nargs > 1 and len(check_default) != nargs: + subject = "item length" if multiple else "length" + raise ValueError( + f"'default' {subject} must match nargs={nargs}." + ) + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + return { + "name": self.name, + "param_type_name": self.param_type_name, + "opts": self.opts, + "secondary_opts": self.secondary_opts, + "type": self.type.to_info_dict(), + "required": self.required, + "nargs": self.nargs, + "multiple": self.multiple, + "default": self.default, + "envvar": self.envvar, + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + raise NotImplementedError() + + @property + def human_readable_name(self) -> str: + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. + """ + return self.name # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + + metavar = self.type.get_metavar(self) + + if metavar is None: + metavar = self.type.name.upper() + + if self.nargs != 1: + metavar += "..." + + return metavar + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + """Get the default for the parameter. Tries + :meth:`Context.lookup_default` first, then the local default. + + :param ctx: Current context. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0.2 + Type casting is no longer performed when getting a default. + + .. versionchanged:: 8.0.1 + Type casting can fail in resilient parsing mode. Invalid + defaults will not prevent showing help text. + + .. versionchanged:: 8.0 + Looks at ``ctx.default_map`` first. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. 
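As the `default` and `get_default` documentation above describes, a default may be a zero-argument callable that is invoked lazily; pairing it with a string `show_default` keeps the help page stable. A small sketch (the option name is invented):

```python
import time
import click

@click.command()
@click.option(
    "--stamp",
    default=lambda: int(time.time()),  # evaluated per invocation, not at import
    show_default="current unix time",  # shown in help instead of the dynamic value
)
def build(stamp):
    click.echo(f"building at {stamp}")

if __name__ == "__main__":
    build()
```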
+ """ + value = ctx.lookup_default(self.name, call=False) # type: ignore + + if value is None: + value = self.default + + if call and callable(value): + value = value() + + return value + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + raise NotImplementedError() + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, t.Any] + ) -> t.Tuple[t.Any, ParameterSource]: + value = opts.get(self.name) # type: ignore + source = ParameterSource.COMMANDLINE + + if value is None: + value = self.value_from_envvar(ctx) + source = ParameterSource.ENVIRONMENT + + if value is None: + value = ctx.lookup_default(self.name) # type: ignore + source = ParameterSource.DEFAULT_MAP + + if value is None: + value = self.get_default(ctx) + source = ParameterSource.DEFAULT + + return value, source + + def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: + """Convert and validate a value against the option's + :attr:`type`, :attr:`multiple`, and :attr:`nargs`. + """ + if value is None: + return () if self.multiple or self.nargs == -1 else None + + def check_iter(value: t.Any) -> t.Iterator: + try: + return _check_iter(value) + except TypeError: + # This should only happen when passing in args manually, + # the parser should construct an iterable when parsing + # the command line. + raise BadParameter( + _("Value must be an iterable."), ctx=ctx, param=self + ) from None + + if self.nargs == 1 or self.type.is_composite: + convert: t.Callable[[t.Any], t.Any] = partial( + self.type, param=self, ctx=ctx + ) + elif self.nargs == -1: + + def convert(value: t.Any) -> t.Tuple: + return tuple(self.type(x, self, ctx) for x in check_iter(value)) + + else: # nargs > 1 + + def convert(value: t.Any) -> t.Tuple: + value = tuple(check_iter(value)) + + if len(value) != self.nargs: + raise BadParameter( + ngettext( + "Takes {nargs} values but 1 was given.", + "Takes {nargs} values but {len} were given.", + len(value), + ).format(nargs=self.nargs, len=len(value)), + ctx=ctx, + param=self, + ) + + return tuple(self.type(x, self, ctx) for x in value) + + if self.multiple: + return tuple(convert(x) for x in check_iter(value)) + + return convert(value) + + def value_is_missing(self, value: t.Any) -> bool: + if value is None: + return True + + if (self.nargs != 1 or self.multiple) and value == (): + return True + + return False + + def process_value(self, ctx: Context, value: t.Any) -> t.Any: + value = self.type_cast_value(ctx, value) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + if self.callback is not None: + value = self.callback(ctx, self, value) + + return value + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + if self.envvar is None: + return None + + if isinstance(self.envvar, str): + rv = os.environ.get(self.envvar) + + if rv: + return rv + else: + for envvar in self.envvar: + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is not None and self.nargs != 1: + rv = self.type.split_envvar_value(rv) + + return rv + + def handle_parse_result( + self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] + ) -> t.Tuple[t.Any, t.List[str]]: + with augment_usage_errors(ctx, param=self): + value, source = self.consume_value(ctx, opts) + ctx.set_parameter_source(self.name, source) # type: ignore + + try: + value = self.process_value(ctx, value) + except Exception: 
+ if not ctx.resilient_parsing: + raise + + value = None + + if self.expose_value: + ctx.params[self.name] = value # type: ignore + + return value, args + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + pass + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [] + + def get_error_hint(self, ctx: Context) -> str: + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return " / ".join(f"'{x}'" for x in hint_list) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. If a + ``shell_complete`` function was given during init, it is used. + Otherwise, the :attr:`type` + :meth:`~click.types.ParamType.shell_complete` function is used. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + if self._custom_shell_complete is not None: + results = self._custom_shell_complete(ctx, self, incomplete) + + if results and isinstance(results[0], str): + from click.shell_completion import CompletionItem + + results = [CompletionItem(c) for c in results] + + return t.cast(t.List["CompletionItem"], results) + + return self.type.shell_complete(ctx, self, incomplete) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: Show the default value for this option in its + help text. Values are not shown by default, unless + :attr:`Context.show_default` is ``True``. If this value is a + string, it shows that string in parentheses instead of the + actual value. This is particularly useful for dynamic options. + For single option boolean flags, the default remains hidden if + its value is ``False``. + :param show_envvar: Controls if an environment variable should be + shown on the help page. Normally, environment variables are not + shown. + :param prompt: If set to ``True`` or a non empty string then the + user will be prompted for input. If set to ``True`` the prompt + will be the option name capitalized. + :param confirmation_prompt: Prompt a second time to confirm the + value if it was prompted for. Can be set to a string instead of + ``True`` to customize the message. + :param prompt_required: If set to ``False``, the user will be + prompted for input only when the option was specified as a flag + without a value. + :param hide_input: If this is ``True`` then the input on the prompt + will be hidden from the user. This is useful for password input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. + :param help: the help string. 
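Several of the `Option` features listed above, combined in one illustrative command (names invented; behavior as documented in this vendored copy):

```python
import click

@click.command()
@click.option("--name", prompt=True)               # asks "Name:" when omitted
@click.option("-v", "--verbose", count=True)       # -vvv -> verbose == 3
@click.option("--color/--no-color", default=True)  # boolean flag pair
def run(name, verbose, color):
    click.echo(f"{name}: verbosity={verbose}, color={color}")

if __name__ == "__main__":
    run()
```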
+ :param hidden: hide this option from help outputs. + + .. versionchanged:: 8.1.0 + Help text indentation is cleaned here instead of only in the + ``@option`` decorator. + + .. versionchanged:: 8.1.0 + The ``show_default`` parameter overrides + ``Context.show_default``. + + .. versionchanged:: 8.1.0 + The default of a single option boolean flag is not shown if the + default value is ``False``. + + .. versionchanged:: 8.0.1 + ``type`` is detected from ``flag_value`` if given. + """ + + param_type_name = "option" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + show_default: t.Union[bool, str, None] = None, + prompt: t.Union[bool, str] = False, + confirmation_prompt: t.Union[bool, str] = False, + prompt_required: bool = True, + hide_input: bool = False, + is_flag: t.Optional[bool] = None, + flag_value: t.Optional[t.Any] = None, + multiple: bool = False, + count: bool = False, + allow_from_autoenv: bool = True, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + help: t.Optional[str] = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = False, + **attrs: t.Any, + ) -> None: + if help: + help = inspect.cleandoc(help) + + default_is_missing = "default" not in attrs + super().__init__(param_decls, type=type, multiple=multiple, **attrs) + + if prompt is True: + if self.name is None: + raise TypeError("'name' is required with 'prompt=True'.") + + prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.prompt_required = prompt_required + self.hide_input = hide_input + self.hidden = hidden + + # If prompt is enabled but not required, then the option can be + # used as a flag to indicate using prompt or flag_value. + self._flag_needs_value = self.prompt is not None and not self.prompt_required + + if is_flag is None: + if flag_value is not None: + # Implicitly a flag because flag_value was set. + is_flag = True + elif self._flag_needs_value: + # Not a flag, but when used as a flag it shows a prompt. + is_flag = False + else: + # Implicitly a flag because flag options were given. + is_flag = bool(self.secondary_opts) + elif is_flag is False and not self._flag_needs_value: + # Not a flag, and prompt is not enabled, can be used as a + # flag if flag_value is set. + self._flag_needs_value = flag_value is not None + + if is_flag and default_is_missing and not self.required: + self.default: t.Union[t.Any, t.Callable[[], t.Any]] = False + + if flag_value is None: + flag_value = not self.default + + if is_flag and type is None: + # Re-guess the type from the flag value instead of the + # default. 
+ self.type = types.convert_type(None, flag_value) + + self.is_flag: bool = is_flag + self.is_bool_flag = is_flag and isinstance(self.type, types.BoolParamType) + self.flag_value: t.Any = flag_value + + # Counting + self.count = count + if count: + if type is None: + self.type = types.IntRange(min=0) + if default_is_missing: + self.default = 0 + + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + if __debug__: + if self.nargs == -1: + raise TypeError("nargs=-1 is not supported for options.") + + if self.prompt and self.is_flag and not self.is_bool_flag: + raise TypeError("'prompt' is not valid for non-boolean flag.") + + if not self.is_bool_flag and self.secondary_opts: + raise TypeError("Secondary flag is not valid for non-boolean flag.") + + if self.is_bool_flag and self.hide_input and self.prompt is not None: + raise TypeError( + "'prompt' with 'hide_input' is not valid for boolean flag." + ) + + if self.count: + if self.multiple: + raise TypeError("'count' is not valid with 'multiple'.") + + if self.is_flag: + raise TypeError("'count' is not valid with 'is_flag'.") + + if self.multiple and self.is_flag: + raise TypeError("'multiple' is not valid with 'is_flag', use 'count'.") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + help=self.help, + prompt=self.prompt, + is_flag=self.is_flag, + flag_value=self.flag_value, + count=self.count, + hidden=self.hidden, + ) + return info_dict + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if decl.isidentifier(): + if name is not None: + raise TypeError(f"Name '{name}' defined twice") + name = decl + else: + split_char = ";" if decl[:1] == "/" else "/" + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + if first == second: + raise ValueError( + f"Boolean option {decl!r} cannot use the" + " same flag for true/false." + ) + else: + possible_names.append(split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace("-", "_").lower() + if not name.isidentifier(): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError("Could not determine name for option") + + if not opts and not secondary_opts: + raise TypeError( + f"No options defined but a name was passed ({name})." + " Did you mean to declare an argument instead? Did" + f" you mean to pass '--{name}'?" 
+ ) + + return name, opts, secondary_opts + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + if self.multiple: + action = "append" + elif self.count: + action = "count" + else: + action = "store" + + if self.is_flag: + action = f"{action}_const" + + if self.is_bool_flag and self.secondary_opts: + parser.add_option( + obj=self, opts=self.opts, dest=self.name, action=action, const=True + ) + parser.add_option( + obj=self, + opts=self.secondary_opts, + dest=self.name, + action=action, + const=False, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + const=self.flag_value, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + nargs=self.nargs, + ) + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + if self.hidden: + return None + + any_prefix_is_slash = False + + def _write_opts(opts: t.Sequence[str]) -> str: + nonlocal any_prefix_is_slash + + rv, any_slashes = join_options(opts) + + if any_slashes: + any_prefix_is_slash = True + + if not self.is_flag and not self.count: + rv += f" {self.make_metavar()}" + + return rv + + rv = [_write_opts(self.opts)] + + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + extra = [] + + if self.show_envvar: + envvar = self.envvar + + if envvar is None: + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + + if envvar is not None: + var_str = ( + envvar + if isinstance(envvar, str) + else ", ".join(str(d) for d in envvar) + ) + extra.append(_("env var: {var}").format(var=var_str)) + + # Temporarily enable resilient parsing to avoid type casting + # failing for the default. Might be possible to extend this to + # help formatting in general. + resilient = ctx.resilient_parsing + ctx.resilient_parsing = True + + try: + default_value = self.get_default(ctx, call=False) + finally: + ctx.resilient_parsing = resilient + + show_default = False + show_default_is_str = False + + if self.show_default is not None: + if isinstance(self.show_default, str): + show_default_is_str = show_default = True + else: + show_default = self.show_default + elif ctx.show_default is not None: + show_default = ctx.show_default + + if show_default_is_str or (show_default and (default_value is not None)): + if show_default_is_str: + default_string = f"({self.show_default})" + elif isinstance(default_value, (list, tuple)): + default_string = ", ".join(str(d) for d in default_value) + elif inspect.isfunction(default_value): + default_string = _("(dynamic)") + elif self.is_bool_flag and self.secondary_opts: + # For boolean flags that have distinct True/False opts, + # use the opt without prefix instead of the value. 
+ default_string = split_opt( + (self.opts if self.default else self.secondary_opts)[0] + )[1] + elif self.is_bool_flag and not self.secondary_opts and not default_value: + default_string = "" + else: + default_string = str(default_value) + + if default_string: + extra.append(_("default: {default}").format(default=default_string)) + + if ( + isinstance(self.type, types._NumberRangeBase) + # skip count with default range type + and not (self.count and self.type.min == 0 and self.type.max is None) + ): + range_str = self.type._describe_range() + + if range_str: + extra.append(range_str) + + if self.required: + extra.append(_("required")) + + if extra: + extra_str = "; ".join(extra) + help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + # If we're a non boolean flag our default is more complex because + # we need to look at all flags in the same group to figure out + # if we're the default one in which case we return the flag + # value as default. + if self.is_flag and not self.is_bool_flag: + for param in ctx.command.params: + if param.name == self.name and param.default: + return param.flag_value # type: ignore + + return None + + return super().get_default(ctx, call=call) + + def prompt_for_value(self, ctx: Context) -> t.Any: + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + assert self.prompt is not None + + # Calculate the default before prompting anything to be stable. + default = self.get_default(ctx) + + # If this is a prompt for a flag we need to handle this + # differently. + if self.is_bool_flag: + return confirm(self.prompt, default) + + return prompt( + self.prompt, + default=default, + type=self.type, + hide_input=self.hide_input, + show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x), + ) + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + rv = super().resolve_envvar_value(ctx) + + if rv is not None: + return rv + + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is None: + return None + + value_depth = (self.nargs != 1) + bool(self.multiple) + + if value_depth > 0: + rv = self.type.split_envvar_value(rv) + + if self.multiple and self.nargs != 1: + rv = batch(rv, self.nargs) + + return rv + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, "Parameter"] + ) -> t.Tuple[t.Any, ParameterSource]: + value, source = super().consume_value(ctx, opts) + + # The parser will emit a sentinel value if the option can be + # given as a flag without a value. This is different from None + # to distinguish from the flag not being given at all. 
+ if value is _flag_needs_value: + if self.prompt is not None and not ctx.resilient_parsing: + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + else: + value = self.flag_value + source = ParameterSource.COMMANDLINE + + elif ( + self.multiple + and value is not None + and any(v is _flag_needs_value for v in value) + ): + value = [self.flag_value if v is _flag_needs_value else v for v in value] + source = ParameterSource.COMMANDLINE + + # The value wasn't set, or used the param's default, prompt if + # prompting is enabled. + elif ( + source in {None, ParameterSource.DEFAULT} + and self.prompt is not None + and (self.required or self.prompt_required) + and not ctx.resilient_parsing + ): + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + + return value, source + + +class Argument(Parameter): + """Arguments are positional parameters to a command. They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the parameter constructor. + """ + + param_type_name = "argument" + + def __init__( + self, + param_decls: t.Sequence[str], + required: t.Optional[bool] = None, + **attrs: t.Any, + ) -> None: + if required is None: + if attrs.get("default") is not None: + required = False + else: + required = attrs.get("nargs", 1) > 0 + + if "multiple" in attrs: + raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") + + super().__init__(param_decls, required=required, **attrs) + + if __debug__: + if self.default is not None and self.nargs == -1: + raise TypeError("'default' is not supported for nargs=-1.") + + @property + def human_readable_name(self) -> str: + if self.metavar is not None: + return self.metavar + return self.name.upper() # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() # type: ignore + if not self.required: + var = f"[{var}]" + if self.nargs != 1: + var += "..." + return var + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + if not decls: + if not expose_value: + return None, [], [] + raise TypeError("Could not determine name for argument") + if len(decls) == 1: + name = arg = decls[0] + name = name.replace("-", "_").lower() + else: + raise TypeError( + "Arguments take exactly one parameter declaration, got" + f" {len(decls)}." 
+ ) + return name, [arg], [] + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [self.make_metavar()] + + def get_error_hint(self, ctx: Context) -> str: + return f"'{self.make_metavar()}'" + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/venv/lib/python3.10/site-packages/click/decorators.py b/venv/lib/python3.10/site-packages/click/decorators.py new file mode 100644 index 0000000..28618dc --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/decorators.py @@ -0,0 +1,497 @@ +import inspect +import types +import typing as t +from functools import update_wrapper +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .globals import get_current_context +from .utils import echo + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +FC = t.TypeVar("FC", bound=t.Union[t.Callable[..., t.Any], Command]) + + +def pass_context(f: F) -> F: + """Marks a callback as wanting to receive the current context + object as first argument. + """ + + def new_func(*args, **kwargs): # type: ignore + return f(get_current_context(), *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + +def pass_obj(f: F) -> F: + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + + def new_func(*args, **kwargs): # type: ignore + return f(get_current_context().obj, *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + +def make_pass_decorator( + object_type: t.Type, ensure: bool = False +) -> "t.Callable[[F], F]": + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. + + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + + def decorator(f: F) -> F: + def new_func(*args, **kwargs): # type: ignore + ctx = get_current_context() + + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + + if obj is None: + raise RuntimeError( + "Managed to invoke callback without a context" + f" object of type {object_type.__name__!r}" + " existing." + ) + + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + return decorator + + +def pass_meta_key( + key: str, *, doc_description: t.Optional[str] = None +) -> "t.Callable[[F], F]": + """Create a decorator that passes a key from + :attr:`click.Context.meta` as the first argument to the decorated + function. + + :param key: Key in ``Context.meta`` to pass. + :param doc_description: Description of the object being passed, + inserted into the decorator's docstring. Defaults to "the 'key' + key from Context.meta". + + .. 
versionadded:: 8.0 + """ + + def decorator(f: F) -> F: + def new_func(*args, **kwargs): # type: ignore + ctx = get_current_context() + obj = ctx.meta[key] + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + if doc_description is None: + doc_description = f"the {key!r} key from :attr:`click.Context.meta`" + + decorator.__doc__ = ( + f"Decorator that passes {doc_description} as the first argument" + " to the decorated function." + ) + return decorator + + +CmdType = t.TypeVar("CmdType", bound=Command) + + +@t.overload +def command( + __func: t.Callable[..., t.Any], +) -> Command: + ... + + +@t.overload +def command( + name: t.Optional[str] = None, + **attrs: t.Any, +) -> t.Callable[..., Command]: + ... + + +@t.overload +def command( + name: t.Optional[str] = None, + cls: t.Type[CmdType] = ..., + **attrs: t.Any, +) -> t.Callable[..., CmdType]: + ... + + +def command( + name: t.Union[str, t.Callable[..., t.Any], None] = None, + cls: t.Optional[t.Type[Command]] = None, + **attrs: t.Any, +) -> t.Union[Command, t.Callable[..., Command]]: + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. + + The name of the command defaults to the name of the function with + underscores replaced by dashes. If you want to change that, you can + pass the intended name as the first argument. + + All keyword arguments are forwarded to the underlying command class. + For the ``params`` argument, any decorated params are appended to + the end of the list. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: the name of the command. This defaults to the function + name with underscores replaced by dashes. + :param cls: the command class to instantiate. This defaults to + :class:`Command`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.1 + The ``params`` argument can be used. Decorated params are + appended to the end of the list. + """ + + func: t.Optional[t.Callable[..., t.Any]] = None + + if callable(name): + func = name + name = None + assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." + assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." + + if cls is None: + cls = Command + + def decorator(f: t.Callable[..., t.Any]) -> Command: + if isinstance(f, Command): + raise TypeError("Attempted to convert a callback into a command twice.") + + attr_params = attrs.pop("params", None) + params = attr_params if attr_params is not None else [] + + try: + decorator_params = f.__click_params__ # type: ignore + except AttributeError: + pass + else: + del f.__click_params__ # type: ignore + params.extend(reversed(decorator_params)) + + if attrs.get("help") is None: + attrs["help"] = f.__doc__ + + cmd = cls( # type: ignore[misc] + name=name or f.__name__.lower().replace("_", "-"), # type: ignore[arg-type] + callback=f, + params=params, + **attrs, + ) + cmd.__doc__ = f.__doc__ + return cmd + + if func is not None: + return decorator(func) + + return decorator + + +@t.overload +def group( + __func: t.Callable[..., t.Any], +) -> Group: + ... + + +@t.overload +def group( + name: t.Optional[str] = None, + **attrs: t.Any, +) -> t.Callable[[F], Group]: + ... 
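The `command()` factory above accepts `cls=` to swap in a `Command` subclass, and since 8.1 the decorator also works without parentheses. A sketch under those assumptions (`TimedCommand` is an invented example class):

```python
import time
import click

class TimedCommand(click.Command):
    """Toy subclass demonstrating the cls= hook."""

    def invoke(self, ctx):
        start = time.perf_counter()
        try:
            return super().invoke(ctx)
        finally:
            click.echo(f"took {time.perf_counter() - start:.3f}s", err=True)

@click.command(cls=TimedCommand)
def sync():
    click.echo("syncing")

@click.command  # parentheses optional as of 8.1
def status():
    click.echo("ok")
```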
+ + +def group( + name: t.Union[str, t.Callable[..., t.Any], None] = None, **attrs: t.Any +) -> t.Union[Group, t.Callable[[F], Group]]: + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + """ + if attrs.get("cls") is None: + attrs["cls"] = Group + + if callable(name): + grp: t.Callable[[F], Group] = t.cast(Group, command(**attrs)) + return grp(name) + + return t.cast(Group, command(name, **attrs)) + + +def _param_memo(f: FC, param: Parameter) -> None: + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, "__click_params__"): + f.__click_params__ = [] # type: ignore + + f.__click_params__.append(param) # type: ignore + + +def argument(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]: + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + :param cls: the argument class to instantiate. This defaults to + :class:`Argument`. + """ + + def decorator(f: FC) -> FC: + ArgumentClass = attrs.pop("cls", None) or Argument + _param_memo(f, ArgumentClass(param_decls, **attrs)) + return f + + return decorator + + +def option(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]: + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + """ + + def decorator(f: FC) -> FC: + # Issue 926, copy attrs, so pre-defined options can re-use the same cls= + option_attrs = attrs.copy() + OptionClass = option_attrs.pop("cls", None) or Option + _param_memo(f, OptionClass(param_decls, **option_attrs)) + return f + + return decorator + + +def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--yes`` option which shows a prompt before continuing if + not passed. If the prompt is declined, the program will exit. + + :param param_decls: One or more option names. Defaults to the single + value ``"--yes"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value: + ctx.abort() + + if not param_decls: + param_decls = ("--yes",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("callback", callback) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("prompt", "Do you want to continue?") + kwargs.setdefault("help", "Confirm the action without prompting.") + return option(*param_decls, **kwargs) + + +def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--password`` option which prompts for a password, hiding + input and asking to enter the value again for confirmation. + + :param param_decls: One or more option names. Defaults to the single + value ``"--password"``. + :param kwargs: Extra arguments are passed to :func:`option`. 
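The convenience decorators defined above compose naturally; a hedged sketch combining `argument`, `confirmation_option`, and `password_option` (the command itself is invented):

```python
import click

@click.command()
@click.argument("src", type=click.Path(exists=True))
@click.confirmation_option()  # adds --yes; declining the prompt aborts
@click.password_option()      # adds --password with hidden, confirmed input
def wipe(src, password):
    click.echo(f"wiping {src} (password length {len(password)})")

if __name__ == "__main__":
    wipe()
```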
+ """ + if not param_decls: + param_decls = ("--password",) + + kwargs.setdefault("prompt", True) + kwargs.setdefault("confirmation_prompt", True) + kwargs.setdefault("hide_input", True) + return option(*param_decls, **kwargs) + + +def version_option( + version: t.Optional[str] = None, + *param_decls: str, + package_name: t.Optional[str] = None, + prog_name: t.Optional[str] = None, + message: t.Optional[str] = None, + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: + """Add a ``--version`` option which immediately prints the version + number and exits the program. + + If ``version`` is not provided, Click will try to detect it using + :func:`importlib.metadata.version` to get the version for the + ``package_name``. On Python < 3.8, the ``importlib_metadata`` + backport must be installed. + + If ``package_name`` is not provided, Click will try to detect it by + inspecting the stack frames. This will be used to detect the + version, so it must match the name of the installed package. + + :param version: The version number to show. If not provided, Click + will try to detect it. + :param param_decls: One or more option names. Defaults to the single + value ``"--version"``. + :param package_name: The package name to detect the version from. If + not provided, Click will try to detect it. + :param prog_name: The name of the CLI to show in the message. If not + provided, it will be detected from the command. + :param message: The message to show. The values ``%(prog)s``, + ``%(package)s``, and ``%(version)s`` are available. Defaults to + ``"%(prog)s, version %(version)s"``. + :param kwargs: Extra arguments are passed to :func:`option`. + :raise RuntimeError: ``version`` could not be detected. + + .. versionchanged:: 8.0 + Add the ``package_name`` parameter, and the ``%(package)s`` + value for messages. + + .. versionchanged:: 8.0 + Use :mod:`importlib.metadata` instead of ``pkg_resources``. The + version is detected based on the package name, not the entry + point name. The Python package name must match the installed + package name, or be passed with ``package_name=``. + """ + if message is None: + message = _("%(prog)s, version %(version)s") + + if version is None and package_name is None: + frame = inspect.currentframe() + f_back = frame.f_back if frame is not None else None + f_globals = f_back.f_globals if f_back is not None else None + # break reference cycle + # https://docs.python.org/3/library/inspect.html#the-interpreter-stack + del frame + + if f_globals is not None: + package_name = f_globals.get("__name__") + + if package_name == "__main__": + package_name = f_globals.get("__package__") + + if package_name: + package_name = package_name.partition(".")[0] + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + nonlocal prog_name + nonlocal version + + if prog_name is None: + prog_name = ctx.find_root().info_name + + if version is None and package_name is not None: + metadata: t.Optional[types.ModuleType] + + try: + from importlib import metadata # type: ignore + except ImportError: + # Python < 3.8 + import importlib_metadata as metadata # type: ignore + + try: + version = metadata.version(package_name) # type: ignore + except metadata.PackageNotFoundError: # type: ignore + raise RuntimeError( + f"{package_name!r} is not installed. Try passing" + " 'package_name' instead." + ) from None + + if version is None: + raise RuntimeError( + f"Could not determine the version for {package_name!r} automatically." 
+ ) + + echo( + t.cast(str, message) + % {"prog": prog_name, "package": package_name, "version": version}, + color=ctx.color, + ) + ctx.exit() + + if not param_decls: + param_decls = ("--version",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show the version and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) + + +def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--help`` option which immediately prints the help page + and exits the program. + + This is usually unnecessary, as the ``--help`` option is added to + each command automatically unless ``add_help_option=False`` is + passed. + + :param param_decls: One or more option names. Defaults to the single + value ``"--help"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + if not param_decls: + param_decls = ("--help",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show this message and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) diff --git a/venv/lib/python3.10/site-packages/click/exceptions.py b/venv/lib/python3.10/site-packages/click/exceptions.py new file mode 100644 index 0000000..9e20b3e --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/exceptions.py @@ -0,0 +1,287 @@ +import os +import typing as t +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import get_text_stderr +from .utils import echo + +if t.TYPE_CHECKING: + from .core import Context + from .core import Parameter + + +def _join_param_hints( + param_hint: t.Optional[t.Union[t.Sequence[str], str]] +) -> t.Optional[str]: + if param_hint is not None and not isinstance(param_hint, str): + return " / ".join(repr(x) for x in param_hint) + + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception. + exit_code = 1 + + def __init__(self, message: str) -> None: + super().__init__(message) + self.message = message + + def format_message(self) -> str: + return self.message + + def __str__(self) -> str: + return self.message + + def show(self, file: t.Optional[t.IO] = None) -> None: + if file is None: + file = get_text_stderr() + + echo(_("Error: {message}").format(message=self.format_message()), file=file) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. 
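As the two docstrings above indicate, `ClickException` renders as `Error: ...` and exits with code 1, while `UsageError` exits with code 2 and additionally prints usage plus a help hint. Raising the former from a command callback (illustrative):

```python
import click

@click.command()
@click.argument("path")
def read(path):
    try:
        data = open(path, "rb").read()
    except OSError as e:
        # Printed as "Error: ..." on stderr; the process exits with code 1.
        raise click.ClickException(f"cannot read {path}: {e}")
    click.echo(f"{len(data)} bytes")
```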
+ """ + + exit_code = 2 + + def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: + super().__init__(message) + self.ctx = ctx + self.cmd = self.ctx.command if self.ctx else None + + def show(self, file: t.Optional[t.IO] = None) -> None: + if file is None: + file = get_text_stderr() + color = None + hint = "" + if ( + self.ctx is not None + and self.ctx.command.get_help_option(self.ctx) is not None + ): + hint = _("Try '{command} {option}' for help.").format( + command=self.ctx.command_path, option=self.ctx.help_option_names[0] + ) + hint = f"{hint}\n" + if self.ctx is not None: + color = self.ctx.color + echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=color, + ) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. + :param param_hint: a string that shows up as parameter name. This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. + """ + + def __init__( + self, + message: str, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + ) -> None: + super().__init__(message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + return _("Invalid value: {message}").format(message=self.message) + + return _("Invalid value for {param_hint}: {message}").format( + param_hint=_join_param_hints(param_hint), message=self.message + ) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__( + self, + message: t.Optional[str] = None, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + param_type: t.Optional[str] = None, + ) -> None: + super().__init__(message or "", ctx, param, param_hint) + self.param_type = param_type + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint: t.Optional[str] = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + param_hint = None + + param_hint = _join_param_hints(param_hint) + param_hint = f" {param_hint}" if param_hint else "" + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message(self.param) + if msg_extra: + if msg: + msg += f". 
{msg_extra}" + else: + msg = msg_extra + + msg = f" {msg}" if msg else "" + + # Translate param_type for known types. + if param_type == "argument": + missing = _("Missing argument") + elif param_type == "option": + missing = _("Missing option") + elif param_type == "parameter": + missing = _("Missing parameter") + else: + missing = _("Missing {param_type}").format(param_type=param_type) + + return f"{missing}{param_hint}.{msg}" + + def __str__(self) -> str: + if not self.message: + param_name = self.param.name if self.param else None + return _("Missing parameter: {param_name}").format(param_name=param_name) + else: + return self.message + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. versionadded:: 4.0 + """ + + def __init__( + self, + option_name: str, + message: t.Optional[str] = None, + possibilities: t.Optional[t.Sequence[str]] = None, + ctx: t.Optional["Context"] = None, + ) -> None: + if message is None: + message = _("No such option: {name}").format(name=option_name) + + super().__init__(message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self) -> str: + if not self.possibilities: + return self.message + + possibility_str = ", ".join(sorted(self.possibilities)) + suggest = ngettext( + "Did you mean {possibility}?", + "(Possible options: {possibilities})", + len(self.possibilities), + ).format(possibility=possibility_str, possibilities=possibility_str) + return f"{self.message} {suggest}" + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__( + self, option_name: str, message: str, ctx: t.Optional["Context"] = None + ) -> None: + super().__init__(message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. versionadded:: 6.0 + """ + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: + if hint is None: + hint = _("unknown error") + + super().__init__(hint) + self.ui_filename = os.fsdecode(filename) + self.filename = filename + + def format_message(self) -> str: + return _("Could not open file {filename!r}: {message}").format( + filename=self.ui_filename, message=self.message + ) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. 
+ """ + + __slots__ = ("exit_code",) + + def __init__(self, code: int = 0) -> None: + self.exit_code = code diff --git a/venv/lib/python3.10/site-packages/click/formatting.py b/venv/lib/python3.10/site-packages/click/formatting.py new file mode 100644 index 0000000..ddd2a2f --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/formatting.py @@ -0,0 +1,301 @@ +import typing as t +from contextlib import contextmanager +from gettext import gettext as _ + +from ._compat import term_len +from .parser import split_opt + +# Can force a width. This is used by the test system +FORCED_WIDTH: t.Optional[int] = None + + +def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: + widths: t.Dict[int, int] = {} + + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows( + rows: t.Iterable[t.Tuple[str, str]], col_count: int +) -> t.Iterator[t.Tuple[str, ...]]: + for row in rows: + yield row + ("",) * (col_count - len(row)) + + +def wrap_text( + text: str, + width: int = 78, + initial_indent: str = "", + subsequent_indent: str = "", + preserve_paragraphs: bool = False, +) -> str: + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. + """ + from ._textwrap import TextWrapper + + text = text.expandtabs() + wrapper = TextWrapper( + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False, + ) + if not preserve_paragraphs: + return wrapper.fill(text) + + p: t.List[t.Tuple[int, bool, str]] = [] + buf: t.List[str] = [] + indent = None + + def _flush_par() -> None: + if not buf: + return + if buf[0].strip() == "\b": + p.append((indent or 0, True, "\n".join(buf[1:]))) + else: + p.append((indent or 0, False, " ".join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(" " * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return "\n\n".join(rv) + + +class HelpFormatter: + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. 
+ """ + + def __init__( + self, + indent_increment: int = 2, + width: t.Optional[int] = None, + max_width: t.Optional[int] = None, + ) -> None: + import shutil + + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + width = FORCED_WIDTH + if width is None: + width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) + self.width = width + self.current_indent = 0 + self.buffer: t.List[str] = [] + + def write(self, string: str) -> None: + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self) -> None: + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self) -> None: + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage( + self, prog: str, args: str = "", prefix: t.Optional[str] = None + ) -> None: + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: The prefix for the first line. Defaults to + ``"Usage: "``. + """ + if prefix is None: + prefix = f"{_('Usage:')} " + + usage_prefix = f"{prefix:>{self.current_indent}}{prog} " + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = " " * term_len(usage_prefix) + self.write( + wrap_text( + args, + text_width, + initial_indent=usage_prefix, + subsequent_indent=indent, + ) + ) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write("\n") + indent = " " * (max(self.current_indent, term_len(prefix)) + 4) + self.write( + wrap_text( + args, text_width, initial_indent=indent, subsequent_indent=indent + ) + ) + + self.write("\n") + + def write_heading(self, heading: str) -> None: + """Writes a heading into the buffer.""" + self.write(f"{'':>{self.current_indent}}{heading}:\n") + + def write_paragraph(self) -> None: + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write("\n") + + def write_text(self, text: str) -> None: + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. + """ + indent = " " * self.current_indent + self.write( + wrap_text( + text, + self.width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True, + ) + ) + self.write("\n") + + def write_dl( + self, + rows: t.Sequence[t.Tuple[str, str]], + col_max: int = 30, + col_spacing: int = 2, + ) -> None: + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. 
+ """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError("Expected two columns for definition list") + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write(f"{'':>{self.current_indent}}{first}") + if not second: + self.write("\n") + continue + if term_len(first) <= first_col - col_spacing: + self.write(" " * (first_col - term_len(first))) + else: + self.write("\n") + self.write(" " * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) + lines = wrapped_text.splitlines() + + if lines: + self.write(f"{lines[0]}\n") + + for line in lines[1:]: + self.write(f"{'':>{first_col + self.current_indent}}{line}\n") + else: + self.write("\n") + + @contextmanager + def section(self, name: str) -> t.Iterator[None]: + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. + """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self) -> t.Iterator[None]: + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self) -> str: + """Returns the buffer contents.""" + return "".join(self.buffer) + + +def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + + for opt in options: + prefix = split_opt(opt)[0] + + if prefix == "/": + any_prefix_is_slash = True + + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/venv/lib/python3.10/site-packages/click/globals.py b/venv/lib/python3.10/site-packages/click/globals.py new file mode 100644 index 0000000..480058f --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/globals.py @@ -0,0 +1,68 @@ +import typing as t +from threading import local + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + +_local = local() + + +@t.overload +def get_current_context(silent: "te.Literal[False]" = False) -> "Context": + ... + + +@t.overload +def get_current_context(silent: bool = ...) -> t.Optional["Context"]: + ... + + +def get_current_context(silent: bool = False) -> t.Optional["Context"]: + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: if set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. 
+ """ + try: + return t.cast("Context", _local.stack[-1]) + except (AttributeError, IndexError) as e: + if not silent: + raise RuntimeError("There is no active click context.") from e + + return None + + +def push_context(ctx: "Context") -> None: + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault("stack", []).append(ctx) + + +def pop_context() -> None: + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: + """Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + + ctx = get_current_context(silent=True) + + if ctx is not None: + return ctx.color + + return None diff --git a/venv/lib/python3.10/site-packages/click/parser.py b/venv/lib/python3.10/site-packages/click/parser.py new file mode 100644 index 0000000..2d5a2ed --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/parser.py @@ -0,0 +1,529 @@ +""" +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright 2001-2006 Gregory P. Ward. All rights reserved. +Copyright 2002-2006 Python Software Foundation. All rights reserved. +""" +# This code uses parts of optparse written by Gregory P. Ward and +# maintained by the Python Software Foundation. +# Copyright 2001-2006 Gregory P. Ward +# Copyright 2002-2006 Python Software Foundation +import typing as t +from collections import deque +from gettext import gettext as _ +from gettext import ngettext + +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import NoSuchOption +from .exceptions import UsageError + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Argument as CoreArgument + from .core import Context + from .core import Option as CoreOption + from .core import Parameter as CoreParameter + +V = t.TypeVar("V") + +# Sentinel value that indicates an option was passed as a flag without a +# value but is not a flag option. Option.consume_value uses this to +# prompt or use the flag_value. +_flag_needs_value = object() + + +def _unpack_args( + args: t.Sequence[str], nargs_spec: t.Sequence[int] +) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with `None`. 
+ """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] + spos: t.Optional[int] = None + + def _fetch(c: "te.Deque[V]") -> t.Optional[V]: + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return None + + while nargs_spec: + nargs = _fetch(nargs_spec) + + if nargs is None: + continue + + if nargs == 1: + rv.append(_fetch(args)) + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError("Cannot have two nargs < 0") + + spos = len(rv) + rv.append(None) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. + if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1 :] = reversed(rv[spos + 1 :]) + + return tuple(rv), list(args) + + +def split_opt(opt: str) -> t.Tuple[str, str]: + first = opt[:1] + if first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = split_opt(opt) + return f"{prefix}{ctx.token_normalize_func(opt)}" + + +def split_arg_string(string: str) -> t.List[str]: + """Split an argument string as with :func:`shlex.split`, but don't + fail if the string is incomplete. Ignores a missing closing quote or + incomplete escape sequence and uses the partial token as-is. + + .. code-block:: python + + split_arg_string("example 'my file") + ["example", "my file"] + + split_arg_string("example my\\") + ["example", "my"] + + :param string: String to split. + """ + import shlex + + lex = shlex.shlex(string, posix=True) + lex.whitespace_split = True + lex.commenters = "" + out = [] + + try: + for token in lex: + out.append(token) + except ValueError: + # Raised when end-of-string is reached in an invalid state. Use + # the partial token as-is. The quote or escape character is in + # lex.state, not lex.token. 
+ out.append(lex.token) + + return out + + +class Option: + def __init__( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ): + self._short_opts = [] + self._long_opts = [] + self.prefixes = set() + + for opt in opts: + prefix, value = split_opt(opt) + if not prefix: + raise ValueError(f"Invalid start character for option ({opt})") + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = "store" + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self) -> bool: + return self.action in ("store", "append") + + def process(self, value: str, state: "ParsingState") -> None: + if self.action == "store": + state.opts[self.dest] = value # type: ignore + elif self.action == "store_const": + state.opts[self.dest] = self.const # type: ignore + elif self.action == "append": + state.opts.setdefault(self.dest, []).append(value) # type: ignore + elif self.action == "append_const": + state.opts.setdefault(self.dest, []).append(self.const) # type: ignore + elif self.action == "count": + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore + else: + raise ValueError(f"unknown action '{self.action}'") + state.order.append(self.obj) + + +class Argument: + def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process( + self, + value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], + state: "ParsingState", + ) -> None: + if self.nargs > 1: + assert value is not None + holes = sum(1 for x in value if x is None) + if holes == len(value): + value = None + elif holes != 0: + raise BadArgumentUsage( + _("Argument {name!r} takes {nargs} values.").format( + name=self.dest, nargs=self.nargs + ) + ) + + if self.nargs == -1 and self.obj.envvar is not None and value == (): + # Replace empty tuple with None so that a value from the + # environment may be tried. + value = None + + state.opts[self.dest] = value # type: ignore + state.order.append(self.obj) + + +class ParsingState: + def __init__(self, rargs: t.List[str]) -> None: + self.opts: t.Dict[str, t.Any] = {} + self.largs: t.List[str] = [] + self.rargs = rargs + self.order: t.List["CoreParameter"] = [] + + +class OptionParser: + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). + + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + """ + + def __init__(self, ctx: t.Optional["Context"] = None) -> None: + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. 
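+        #: For example, when parsing ``tool -v sub --debug`` with this
+        #: set to ``False``, parsing stops at ``sub`` so the subcommand
+        #: can parse ``--debug`` itself (command names illustrative).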
+ self.allow_interspersed_args = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options = False + + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + + self._short_opt: t.Dict[str, Option] = {} + self._long_opt: t.Dict[str, Option] = {} + self._opt_prefixes = {"-", "--"} + self._args: t.List[Argument] = [] + + def add_option( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ) -> None: + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``append_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + opts = [normalize_opt(opt, self.ctx) for opt in opts] + option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument( + self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 + ) -> None: + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + self._args.append(Argument(obj, dest=dest, nargs=nargs)) + + def parse_args( + self, args: t.List[str] + ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state: ParsingState) -> None: + pargs, args = _unpack_args( + state.largs + state.rargs, [x.nargs for x in self._args] + ) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state: ParsingState) -> None: + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. + if arg == "--": + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). 
+ # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt( + self, opt: str, explicit_value: t.Optional[str], state: ParsingState + ) -> None: + if opt not in self._long_opt: + from difflib import get_close_matches + + possibilities = get_close_matches(opt, self._long_opt) + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. + if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + value = self._get_value_from_state(opt, option, state) + + elif explicit_value is not None: + raise BadOptionUsage( + opt, _("Option {name!r} does not take a value.").format(name=opt) + ) + + else: + value = None + + option.process(value, state) + + def _match_short_opt(self, arg: str, state: ParsingState) -> None: + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = normalize_opt(f"{prefix}{ch}", self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. + if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + value = self._get_value_from_state(opt, option, state) + + else: + value = None + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we re-combinate the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(f"{prefix}{''.join(unknown_options)}") + + def _get_value_from_state( + self, option_name: str, option: Option, state: ParsingState + ) -> t.Any: + nargs = option.nargs + + if len(state.rargs) < nargs: + if option.obj._flag_needs_value: + # Option allows omitting the value. + value = _flag_needs_value + else: + raise BadOptionUsage( + option_name, + ngettext( + "Option {name!r} requires an argument.", + "Option {name!r} requires {nargs} arguments.", + nargs, + ).format(name=option_name, nargs=nargs), + ) + elif nargs == 1: + next_rarg = state.rargs[0] + + if ( + option.obj._flag_needs_value + and isinstance(next_rarg, str) + and next_rarg[:1] in self._opt_prefixes + and len(next_rarg) > 1 + ): + # The next arg looks like the start of an option, don't + # use it as the value if omitting the value is allowed. 
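+                # e.g. given "--name --other" where --name may omit its
+                # value, "--other" stays in rargs and the flag's own
+                # value handling (prompt or flag_value) takes over.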
+ value = _flag_needs_value + else: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + return value + + def _process_opts(self, arg: str, state: ParsingState) -> None: + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. + if "=" in arg: + long_opt, explicit_value = arg.split("=", 1) + else: + long_opt = arg + norm_long_opt = normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. + if arg[:2] not in self._opt_prefixes: + self._match_short_opt(arg, state) + return + + if not self.ignore_unknown_options: + raise + + state.largs.append(arg) diff --git a/venv/lib/python3.10/site-packages/click/py.typed b/venv/lib/python3.10/site-packages/click/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/click/shell_completion.py b/venv/lib/python3.10/site-packages/click/shell_completion.py new file mode 100644 index 0000000..c17a8e6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/shell_completion.py @@ -0,0 +1,580 @@ +import os +import re +import typing as t +from gettext import gettext as _ + +from .core import Argument +from .core import BaseCommand +from .core import Context +from .core import MultiCommand +from .core import Option +from .core import Parameter +from .core import ParameterSource +from .parser import split_arg_string +from .utils import echo + + +def shell_complete( + cli: BaseCommand, + ctx_args: t.Dict[str, t.Any], + prog_name: str, + complete_var: str, + instruction: str, +) -> int: + """Perform shell completion for the given CLI program. + + :param cli: Command being called. + :param ctx_args: Extra arguments to pass to + ``cli.make_context``. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + :param instruction: Value of ``complete_var`` with the completion + instruction and shell, in the form ``instruction_shell``. + :return: Status code to exit with. + """ + shell, _, instruction = instruction.partition("_") + comp_cls = get_completion_class(shell) + + if comp_cls is None: + return 1 + + comp = comp_cls(cli, ctx_args, prog_name, complete_var) + + if instruction == "source": + echo(comp.source()) + return 0 + + if instruction == "complete": + echo(comp.complete()) + return 0 + + return 1 + + +class CompletionItem: + """Represents a completion value and metadata about the value. The + default metadata is ``type`` to indicate special shell handling, + and ``help`` if a shell supports showing a help string next to the + value. + + Arbitrary parameters can be passed when creating the object, and + accessed using ``item.attr``. If an attribute wasn't passed, + accessing it returns ``None``. + + :param value: The completion suggestion. 
+ :param type: Tells the shell script to provide special completion + support for the type. Click uses ``"dir"`` and ``"file"``. + :param help: String shown next to the value if supported. + :param kwargs: Arbitrary metadata. The built-in implementations + don't use this, but custom type completions paired with custom + shell support could use it. + """ + + __slots__ = ("value", "type", "help", "_info") + + def __init__( + self, + value: t.Any, + type: str = "plain", + help: t.Optional[str] = None, + **kwargs: t.Any, + ) -> None: + self.value = value + self.type = type + self.help = help + self._info = kwargs + + def __getattr__(self, name: str) -> t.Any: + return self._info.get(name) + + +# Only Bash >= 4.4 has the nosort option. +_SOURCE_BASH = """\ +%(complete_func)s() { + local IFS=$'\\n' + local response + + response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ +%(complete_var)s=bash_complete $1) + + for completion in $response; do + IFS=',' read type value <<< "$completion" + + if [[ $type == 'dir' ]]; then + COMPREPLY=() + compopt -o dirnames + elif [[ $type == 'file' ]]; then + COMPREPLY=() + compopt -o default + elif [[ $type == 'plain' ]]; then + COMPREPLY+=($value) + fi + done + + return 0 +} + +%(complete_func)s_setup() { + complete -o nosort -F %(complete_func)s %(prog_name)s +} + +%(complete_func)s_setup; +""" + +_SOURCE_ZSH = """\ +#compdef %(prog_name)s + +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + (( ! $+commands[%(prog_name)s] )) && return 1 + + response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ +%(complete_var)s=zsh_complete %(prog_name)s)}") + + for type key descr in ${response}; do + if [[ "$type" == "plain" ]]; then + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + elif [[ "$type" == "dir" ]]; then + _path_files -/ + elif [[ "$type" == "file" ]]; then + _path_files -f + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -a completions + fi +} + +compdef %(complete_func)s %(prog_name)s; +""" + +_SOURCE_FISH = """\ +function %(complete_func)s; + set -l response; + + for value in (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ +COMP_CWORD=(commandline -t) %(prog_name)s); + set response $response $value; + end; + + for completion in $response; + set -l metadata (string split "," $completion); + + if test $metadata[1] = "dir"; + __fish_complete_directories $metadata[2]; + else if test $metadata[1] = "file"; + __fish_complete_path $metadata[2]; + else if test $metadata[1] = "plain"; + echo $metadata[2]; + end; + end; +end; + +complete --no-files --command %(prog_name)s --arguments \ +"(%(complete_func)s)"; +""" + + +class ShellComplete: + """Base class for providing shell completion support. A subclass for + a given shell will override attributes and methods to implement the + completion instructions (``source`` and ``complete``). + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + + .. versionadded:: 8.0 + """ + + name: t.ClassVar[str] + """Name to register the shell as with :func:`add_completion_class`. + This is used in completion instructions (``{name}_source`` and + ``{name}_complete``). 
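+
+    For example, ``name = "bash"`` makes the shell script request the
+    ``bash_source`` and ``bash_complete`` instructions.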
+ """ + + source_template: t.ClassVar[str] + """Completion script template formatted by :meth:`source`. This must + be provided by subclasses. + """ + + def __init__( + self, + cli: BaseCommand, + ctx_args: t.Dict[str, t.Any], + prog_name: str, + complete_var: str, + ) -> None: + self.cli = cli + self.ctx_args = ctx_args + self.prog_name = prog_name + self.complete_var = complete_var + + @property + def func_name(self) -> str: + """The name of the shell function defined by the completion + script. + """ + safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), re.ASCII) + return f"_{safe_name}_completion" + + def source_vars(self) -> t.Dict[str, t.Any]: + """Vars for formatting :attr:`source_template`. + + By default this provides ``complete_func``, ``complete_var``, + and ``prog_name``. + """ + return { + "complete_func": self.func_name, + "complete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def source(self) -> str: + """Produce the shell script that defines the completion + function. By default this ``%``-style formats + :attr:`source_template` with the dict returned by + :meth:`source_vars`. + """ + return self.source_template % self.source_vars() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + """Use the env vars defined by the shell script to return a + tuple of ``args, incomplete``. This must be implemented by + subclasses. + """ + raise NotImplementedError + + def get_completions( + self, args: t.List[str], incomplete: str + ) -> t.List[CompletionItem]: + """Determine the context and last complete command or parameter + from the complete args. Call that object's ``shell_complete`` + method to get the completions for the incomplete value. + + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) + obj, incomplete = _resolve_incomplete(ctx, args, incomplete) + return obj.shell_complete(ctx, incomplete) + + def format_completion(self, item: CompletionItem) -> str: + """Format a completion item into the form recognized by the + shell script. This must be implemented by subclasses. + + :param item: Completion item to format. + """ + raise NotImplementedError + + def complete(self) -> str: + """Produce the completion data to send back to the shell. + + By default this calls :meth:`get_completion_args`, gets the + completions, then calls :meth:`format_completion` for each + completion. + """ + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + out = [self.format_completion(item) for item in completions] + return "\n".join(out) + + +class BashComplete(ShellComplete): + """Shell completion for Bash.""" + + name = "bash" + source_template = _SOURCE_BASH + + def _check_version(self) -> None: + import subprocess + + output = subprocess.run( + ["bash", "-c", "echo ${BASH_VERSION}"], stdout=subprocess.PIPE + ) + match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) + + if match is not None: + major, minor = match.groups() + + if major < "4" or major == "4" and minor < "4": + raise RuntimeError( + _( + "Shell completion is not supported for Bash" + " versions older than 4.4." 
+ ) + ) + else: + raise RuntimeError( + _("Couldn't detect Bash version, shell completion is not supported.") + ) + + def source(self) -> str: + self._check_version() + return super().source() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type},{item.value}" + + +class ZshComplete(ShellComplete): + """Shell completion for Zsh.""" + + name = "zsh" + source_template = _SOURCE_ZSH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" + + +class FishComplete(ShellComplete): + """Shell completion for Fish.""" + + name = "fish" + source_template = _SOURCE_FISH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + incomplete = os.environ["COMP_CWORD"] + args = cwords[1:] + + # Fish stores the partial word in both COMP_WORDS and + # COMP_CWORD, remove it from complete args. + if incomplete and args and args[-1] == incomplete: + args.pop() + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + if item.help: + return f"{item.type},{item.value}\t{item.help}" + + return f"{item.type},{item.value}" + + +_available_shells: t.Dict[str, t.Type[ShellComplete]] = { + "bash": BashComplete, + "fish": FishComplete, + "zsh": ZshComplete, +} + + +def add_completion_class( + cls: t.Type[ShellComplete], name: t.Optional[str] = None +) -> None: + """Register a :class:`ShellComplete` subclass under the given name. + The name will be provided by the completion instruction environment + variable during completion. + + :param cls: The completion class that will handle completion for the + shell. + :param name: Name to register the class under. Defaults to the + class's ``name`` attribute. + """ + if name is None: + name = cls.name + + _available_shells[name] = cls + + +def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: + """Look up a registered :class:`ShellComplete` subclass by the name + provided by the completion instruction environment variable. If the + name isn't registered, returns ``None``. + + :param shell: Name the class is registered under. + """ + return _available_shells.get(shell) + + +def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: + """Determine if the given parameter is an argument that can still + accept values. + + :param ctx: Invocation context for the command represented by the + parsed complete args. + :param param: Argument object being checked. 
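+
+    For instance, a variadic argument (``nargs=-1``) can always accept
+    more values, so it always counts as incomplete here.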
+ """ + if not isinstance(param, Argument): + return False + + assert param.name is not None + value = ctx.params[param.name] + return ( + param.nargs == -1 + or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE + or ( + param.nargs > 1 + and isinstance(value, (tuple, list)) + and len(value) < param.nargs + ) + ) + + +def _start_of_option(ctx: Context, value: str) -> bool: + """Check if the value looks like the start of an option.""" + if not value: + return False + + c = value[0] + return c in ctx._opt_prefixes + + +def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool: + """Determine if the given parameter is an option that needs a value. + + :param args: List of complete args before the incomplete value. + :param param: Option object being checked. + """ + if not isinstance(param, Option): + return False + + if param.is_flag or param.count: + return False + + last_option = None + + for index, arg in enumerate(reversed(args)): + if index + 1 > param.nargs: + break + + if _start_of_option(ctx, arg): + last_option = arg + + return last_option is not None and last_option in param.opts + + +def _resolve_context( + cli: BaseCommand, ctx_args: t.Dict[str, t.Any], prog_name: str, args: t.List[str] +) -> Context: + """Produce the context hierarchy starting with the command and + traversing the complete arguments. This only follows the commands, + it doesn't trigger input prompts or callbacks. + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param args: List of complete args before the incomplete value. + """ + ctx_args["resilient_parsing"] = True + ctx = cli.make_context(prog_name, args.copy(), **ctx_args) + args = ctx.protected_args + ctx.args + + while args: + command = ctx.command + + if isinstance(command, MultiCommand): + if not command.chain: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) + args = ctx.protected_args + ctx.args + else: + while args: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + sub_ctx = cmd.make_context( + name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True, + ) + args = sub_ctx.args + + ctx = sub_ctx + args = [*sub_ctx.protected_args, *sub_ctx.args] + else: + break + + return ctx + + +def _resolve_incomplete( + ctx: Context, args: t.List[str], incomplete: str +) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: + """Find the Click object that will handle the completion of the + incomplete value. Return the object and the incomplete value. + + :param ctx: Invocation context for the command represented by + the parsed complete args. + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + # Different shells treat an "=" between a long option name and + # value differently. Might keep the value joined, return the "=" + # as a separate item, or return the split name and value. Always + # split and discard the "=" to make completion easier. + if incomplete == "=": + incomplete = "" + elif "=" in incomplete and _start_of_option(ctx, incomplete): + name, _, incomplete = incomplete.partition("=") + args.append(name) + + # The "--" marker tells Click to stop treating values as options + # even if they start with the option character. 
If it hasn't been + # given and the incomplete arg looks like an option, the current + # command will provide option name completions. + if "--" not in args and _start_of_option(ctx, incomplete): + return ctx.command, incomplete + + params = ctx.command.get_params(ctx) + + # If the last complete arg is an option name with an incomplete + # value, the option will provide value completions. + for param in params: + if _is_incomplete_option(ctx, args, param): + return param, incomplete + + # It's not an option name or value. The first argument without a + # parsed value will provide value completions. + for param in params: + if _is_incomplete_argument(ctx, param): + return param, incomplete + + # There were no unparsed arguments, the command may be a group that + # will provide command name completions. + return ctx.command, incomplete diff --git a/venv/lib/python3.10/site-packages/click/termui.py b/venv/lib/python3.10/site-packages/click/termui.py new file mode 100644 index 0000000..bfb2f5a --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/termui.py @@ -0,0 +1,787 @@ +import inspect +import io +import itertools +import os +import sys +import typing as t +from gettext import gettext as _ + +from ._compat import isatty +from ._compat import strip_ansi +from ._compat import WIN +from .exceptions import Abort +from .exceptions import UsageError +from .globals import resolve_color_default +from .types import Choice +from .types import convert_type +from .types import ParamType +from .utils import echo +from .utils import LazyFile + +if t.TYPE_CHECKING: + from ._termui_impl import ProgressBar + +V = t.TypeVar("V") + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. +visible_prompt_func: t.Callable[[str], str] = input + +_ansi_colors = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + "reset": 39, + "bright_black": 90, + "bright_red": 91, + "bright_green": 92, + "bright_yellow": 93, + "bright_blue": 94, + "bright_magenta": 95, + "bright_cyan": 96, + "bright_white": 97, +} +_ansi_reset_all = "\033[0m" + + +def hidden_prompt_func(prompt: str) -> str: + import getpass + + return getpass.getpass(prompt) + + +def _build_prompt( + text: str, + suffix: str, + show_default: bool = False, + default: t.Optional[t.Any] = None, + show_choices: bool = True, + type: t.Optional[ParamType] = None, +) -> str: + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += f" ({', '.join(map(str, type.choices))})" + if default is not None and show_default: + prompt = f"{prompt} [{_format_default(default)}]" + return f"{prompt}{suffix}" + + +def _format_default(default: t.Any) -> t.Any: + if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): + return default.name # type: ignore + + return default + + +def prompt( + text: str, + default: t.Optional[t.Any] = None, + hide_input: bool = False, + confirmation_prompt: t.Union[bool, str] = False, + type: t.Optional[t.Union[ParamType, t.Any]] = None, + value_proc: t.Optional[t.Callable[[str], t.Any]] = None, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, + show_choices: bool = True, +) -> t.Any: + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending an interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. 
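+
+    A small illustrative sketch (values and variable names made up)::
+
+        name = prompt("Your name", default="world")
+        age = prompt("Your age", type=int)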
+ + :param text: the text to show for the prompt. + :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: Prompt a second time to confirm the + value. Can be set to a string instead of ``True`` to customize + the message. + :param type: the type to use to check the value against. + :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". + + .. versionadded:: 8.0 + ``confirmation_prompt`` can be a custom string. + + .. versionadded:: 7.0 + Added the ``show_choices`` parameter. + + .. versionadded:: 6.0 + Added unicode support for cmd.exe on Windows. + + .. versionadded:: 4.0 + Added the `err` parameter. + + """ + + def prompt_func(text: str) -> str: + f = hidden_prompt_func if hide_input else visible_prompt_func + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(text.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. + return f(" ") + except (KeyboardInterrupt, EOFError): + # getpass doesn't print a newline if the user aborts input with ^C. + # Allegedly this behavior is inherited from getpass(3). + # A doc bug has been filed at https://bugs.python.org/issue24711 + if hide_input: + echo(None, err=err) + raise Abort() from None + + if value_proc is None: + value_proc = convert_type(type, default) + + prompt = _build_prompt( + text, prompt_suffix, show_default, default, show_choices, type + ) + + if confirmation_prompt: + if confirmation_prompt is True: + confirmation_prompt = _("Repeat for confirmation") + + confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) + + while True: + while True: + value = prompt_func(prompt) + if value: + break + elif default is not None: + value = default + break + try: + result = value_proc(value) + except UsageError as e: + if hide_input: + echo(_("Error: The value you entered was invalid."), err=err) + else: + echo(_("Error: {e.message}").format(e=e), err=err) # noqa: B306 + continue + if not confirmation_prompt: + return result + while True: + value2 = prompt_func(confirmation_prompt) + is_empty = not value and not value2 + if value2 or is_empty: + break + if value == value2: + return result + echo(_("Error: The two entered values do not match."), err=err) + + +def confirm( + text: str, + default: t.Optional[bool] = False, + abort: bool = False, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, +) -> bool: + """Prompts for confirmation (yes/no question). + + If the user aborts the input by sending a interrupt signal this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the question to ask. + :param default: The default value to use when no input is given. If + ``None``, repeat until input is given. 
+ :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + + .. versionchanged:: 8.0 + Repeat until input is given if ``default`` is ``None``. + + .. versionadded:: 4.0 + Added the ``err`` parameter. + """ + prompt = _build_prompt( + text, + prompt_suffix, + show_default, + "y/n" if default is None else ("Y/n" if default else "y/N"), + ) + + while True: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. + value = visible_prompt_func(" ").lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() from None + if value in ("y", "yes"): + rv = True + elif value in ("n", "no"): + rv = False + elif default is not None and value == "": + rv = default + else: + echo(_("Error: invalid input"), err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def echo_via_pager( + text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], + color: t.Optional[bool] = None, +) -> None: + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. + :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() + elif isinstance(text_or_generator, str): + i = [text_or_generator] + else: + i = iter(t.cast(t.Iterable[str], text_or_generator)) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, str) else str(el) for el in i) + + from ._termui_impl import pager + + return pager(itertools.chain(text_generator, "\n"), color) + + +def progressbar( + iterable: t.Optional[t.Iterable[V]] = None, + length: t.Optional[int] = None, + label: t.Optional[str] = None, + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, +) -> "ProgressBar[V]": + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. + + The context manager creates the progress bar. When the context + manager is entered the progress bar is already created. 
With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + Note: The progress bar is currently designed for use cases where the + total progress can be expected to take at least several seconds. + Because of this, the ProgressBar class object won't display + progress that is considered too fast, and progress where the time + between steps is less than a second. + + No printing must happen or the progress bar will be unintentionally + destroyed. + + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + The ``update()`` method also takes an optional value specifying the + ``current_item`` at the new position. This is useful when used + together with ``item_show_func`` to customize the output for each + manual step:: + + with click.progressbar( + length=total_size, + label='Unzipping archive', + item_show_func=lambda a: a.filename + ) as bar: + for archive in zip_file: + archive.extract() + bar.update(archive.size, archive) + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. + :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: A function called with the current item which + can return a string to show next to the progress bar. If the + function returns ``None`` nothing is shown. The current item can + be ``None``, such as when entering and exiting the bar. + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: The file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. 
+ :param update_min_steps: Render only when this many updates have + completed. This allows tuning for very fast iterators. + + .. versionchanged:: 8.0 + Output is shown even if execution time is less than 0.5 seconds. + + .. versionchanged:: 8.0 + ``item_show_func`` shows the current item, not the previous one. + + .. versionchanged:: 8.0 + Labels are echoed if the output is not a TTY. Reverts a change + in 7.0 that removed all output. + + .. versionadded:: 8.0 + Added the ``update_min_steps`` parameter. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. Added the ``update`` method to + the object. + + .. versionadded:: 2.0 + """ + from ._termui_impl import ProgressBar + + color = resolve_color_default(color) + return ProgressBar( + iterable=iterable, + length=length, + show_eta=show_eta, + show_percent=show_percent, + show_pos=show_pos, + item_show_func=item_show_func, + fill_char=fill_char, + empty_char=empty_char, + bar_template=bar_template, + info_sep=info_sep, + file=file, + label=label, + width=width, + color=color, + update_min_steps=update_min_steps, + ) + + +def clear() -> None: + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + if WIN: + os.system("cls") + else: + sys.stdout.write("\033[2J\033[1;1H") + + +def _interpret_color( + color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 +) -> str: + if isinstance(color, int): + return f"{38 + offset};5;{color:d}" + + if isinstance(color, (tuple, list)): + r, g, b = color + return f"{38 + offset};2;{r:d};{g:d};{b:d}" + + return str(_ansi_colors[color] + offset) + + +def style( + text: t.Any, + fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bold: t.Optional[bool] = None, + dim: t.Optional[bool] = None, + underline: t.Optional[bool] = None, + overline: t.Optional[bool] = None, + italic: t.Optional[bool] = None, + blink: t.Optional[bool] = None, + reverse: t.Optional[bool] = None, + strikethrough: t.Optional[bool] = None, + reset: bool = True, +) -> str: + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. + + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + If the terminal supports it, color may also be specified as: + + - An integer in the interval [0, 255]. The terminal must support + 8-bit/256-color mode. + - An RGB tuple of three integers in [0, 255]. The terminal must + support 24-bit/true-color mode. + + See https://en.wikipedia.org/wiki/ANSI_color and + https://gist.github.com/XVilka/8346728 for more information. 
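+
+    For instance (an illustrative echo of the examples above)::
+
+        click.echo(click.style("8-bit color", fg=196))
+        click.echo(click.style("true color", fg=(255, 12, 128)))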
+ + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param overline: if provided this will enable or disable overline. + :param italic: if provided this will enable or disable italic. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param strikethrough: if provided this will enable or disable + striking through text. + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. + + .. versionchanged:: 8.0 + Added support for 256 and RGB color codes. + + .. versionchanged:: 8.0 + Added the ``strikethrough``, ``italic``, and ``overline`` + parameters. + + .. versionchanged:: 7.0 + Added support for bright colors. + + .. versionadded:: 2.0 + """ + if not isinstance(text, str): + text = str(text) + + bits = [] + + if fg: + try: + bits.append(f"\033[{_interpret_color(fg)}m") + except KeyError: + raise TypeError(f"Unknown color {fg!r}") from None + + if bg: + try: + bits.append(f"\033[{_interpret_color(bg, 10)}m") + except KeyError: + raise TypeError(f"Unknown color {bg!r}") from None + + if bold is not None: + bits.append(f"\033[{1 if bold else 22}m") + if dim is not None: + bits.append(f"\033[{2 if dim else 22}m") + if underline is not None: + bits.append(f"\033[{4 if underline else 24}m") + if overline is not None: + bits.append(f"\033[{53 if overline else 55}m") + if italic is not None: + bits.append(f"\033[{3 if italic else 23}m") + if blink is not None: + bits.append(f"\033[{5 if blink else 25}m") + if reverse is not None: + bits.append(f"\033[{7 if reverse else 27}m") + if strikethrough is not None: + bits.append(f"\033[{9 if strikethrough else 29}m") + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return "".join(bits) + + +def unstyle(text: str) -> str: + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.AnyStr]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, + **styles: t.Any, +) -> None: + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + Non-string types will be converted to :class:`str`. However, + :class:`bytes` are passed directly to :meth:`echo` without applying + style. If you want to style bytes that represent text, call + :meth:`bytes.decode` first. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. 
Bytes are + passed through without style applied. + + .. versionadded:: 2.0 + """ + if message is not None and not isinstance(message, (bytes, bytearray)): + message = style(message, **styles) + + return echo(message, file=file, nl=nl, err=err, color=color) + + +def edit( + text: t.Optional[t.AnyStr] = None, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + filename: t.Optional[str] = None, +) -> t.Optional[t.AnyStr]: + r"""Edits the given text in the defined editor. If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. + + :param text: the text to edit. + :param editor: optionally the editor to use. Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. + """ + from ._termui_impl import Editor + + ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) + + if filename is None: + return ed.edit(text) + + ed.edit_file(filename) + return None + + +def launch(url: str, wait: bool = False, locate: bool = False) -> int: + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: Wait for the program to exit before returning. This + only works if the launched program blocks. In particular, + ``xdg-open`` on Linux does not block. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. +_getchar: t.Optional[t.Callable[[bool], str]] = None + + +def getchar(echo: bool = False) -> str: + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. 
The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + global _getchar + + if _getchar is None: + from ._termui_impl import getchar as f + + _getchar = f + + return _getchar(echo) + + +def raw_terminal() -> t.ContextManager[int]: + from ._termui_impl import raw_terminal as f + + return f() + + +def pause(info: t.Optional[str] = None, err: bool = False) -> None: + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param info: The message to print before pausing. Defaults to + ``"Press any key to continue..."``. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + + if info is None: + info = _("Press any key to continue...") + + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/venv/lib/python3.10/site-packages/click/testing.py b/venv/lib/python3.10/site-packages/click/testing.py new file mode 100644 index 0000000..e395c2e --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/testing.py @@ -0,0 +1,479 @@ +import contextlib +import io +import os +import shlex +import shutil +import sys +import tempfile +import typing as t +from types import TracebackType + +from . import formatting +from . import termui +from . 
import utils +from ._compat import _find_binary_reader + +if t.TYPE_CHECKING: + from .core import BaseCommand + + +class EchoingStdin: + def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: + self._input = input + self._output = output + self._paused = False + + def __getattr__(self, x: str) -> t.Any: + return getattr(self._input, x) + + def _echo(self, rv: bytes) -> bytes: + if not self._paused: + self._output.write(rv) + + return rv + + def read(self, n: int = -1) -> bytes: + return self._echo(self._input.read(n)) + + def read1(self, n: int = -1) -> bytes: + return self._echo(self._input.read1(n)) # type: ignore + + def readline(self, n: int = -1) -> bytes: + return self._echo(self._input.readline(n)) + + def readlines(self) -> t.List[bytes]: + return [self._echo(x) for x in self._input.readlines()] + + def __iter__(self) -> t.Iterator[bytes]: + return iter(self._echo(x) for x in self._input) + + def __repr__(self) -> str: + return repr(self._input) + + +@contextlib.contextmanager +def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: + if stream is None: + yield + else: + stream._paused = True + yield + stream._paused = False + + +class _NamedTextIOWrapper(io.TextIOWrapper): + def __init__( + self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any + ) -> None: + super().__init__(buffer, **kwargs) + self._name = name + self._mode = mode + + @property + def name(self) -> str: + return self._name + + @property + def mode(self) -> str: + return self._mode + + +def make_input_stream( + input: t.Optional[t.Union[str, bytes, t.IO]], charset: str +) -> t.BinaryIO: + # Is already an input stream. + if hasattr(input, "read"): + rv = _find_binary_reader(t.cast(t.IO, input)) + + if rv is not None: + return rv + + raise TypeError("Could not find binary reader for input stream.") + + if input is None: + input = b"" + elif isinstance(input, str): + input = input.encode(charset) + + return io.BytesIO(t.cast(bytes, input)) + + +class Result: + """Holds the captured result of an invoked CLI script.""" + + def __init__( + self, + runner: "CliRunner", + stdout_bytes: bytes, + stderr_bytes: t.Optional[bytes], + return_value: t.Any, + exit_code: int, + exception: t.Optional[BaseException], + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ] = None, + ): + #: The runner that created the result + self.runner = runner + #: The standard output as bytes. + self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or None if not available + self.stderr_bytes = stderr_bytes + #: The value returned from the invoked command. + #: + #: .. versionadded:: 8.0 + self.return_value = return_value + #: The exit code as integer. + self.exit_code = exit_code + #: The exception that happened if one did. 
+ self.exception = exception + #: The traceback + self.exc_info = exc_info + + @property + def output(self) -> str: + """The (standard) output as unicode string.""" + return self.stdout + + @property + def stdout(self) -> str: + """The standard output as unicode string.""" + return self.stdout_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + @property + def stderr(self) -> str: + """The standard error as unicode string.""" + if self.stderr_bytes is None: + raise ValueError("stderr not separately captured") + return self.stderr_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + def __repr__(self) -> str: + exc_str = repr(self.exception) if self.exception else "okay" + return f"<{type(self).__name__} {exc_str}>" + + +class CliRunner: + """The CLI runner provides functionality to invoke a Click command line + script for unittesting purposes in a isolated environment. This only + works in single-threaded systems without any concurrency as it changes the + global interpreter state. + + :param charset: the character set for the input and output data. + :param env: a dictionary with environment variables for overriding. + :param echo_stdin: if this is set to `True`, then reading from stdin writes + to stdout. This is useful for showing examples in + some circumstances. Note that regular prompts + will automatically echo the input. + :param mix_stderr: if this is set to `False`, then stdout and stderr are + preserved as independent streams. This is useful for + Unix-philosophy apps that have predictable stdout and + noisy stderr, such that each may be measured + independently + """ + + def __init__( + self, + charset: str = "utf-8", + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + echo_stdin: bool = False, + mix_stderr: bool = True, + ) -> None: + self.charset = charset + self.env = env or {} + self.echo_stdin = echo_stdin + self.mix_stderr = mix_stderr + + def get_default_prog_name(self, cli: "BaseCommand") -> str: + """Given a command object it will return the default program name + for it. The default is the `name` attribute or ``"root"`` if not + set. + """ + return cli.name or "root" + + def make_env( + self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None + ) -> t.Mapping[str, t.Optional[str]]: + """Returns the environment overrides for invoking a script.""" + rv = dict(self.env) + if overrides: + rv.update(overrides) + return rv + + @contextlib.contextmanager + def isolation( + self, + input: t.Optional[t.Union[str, bytes, t.IO]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + color: bool = False, + ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]: + """A context manager that sets up the isolation for invoking of a + command line tool. This sets up stdin with the given input data + and `os.environ` with the overrides from the given dictionary. + This also rebinds some internals in Click to be mocked (like the + prompt functionality). + + This is automatically done in the :meth:`invoke` method. + + :param input: the input stream to put into sys.stdin. + :param env: the environment overrides as dictionary. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + ``stderr`` is opened with ``errors="backslashreplace"`` + instead of the default ``"strict"``. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. 
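+
+        A minimal sketch of direct use (hedged; :meth:`invoke` is the
+        usual entry point)::
+
+            runner = CliRunner()
+
+            with runner.isolation(input="hi\n") as (out, err):
+                name = input()  # reads "hi" from the isolated stdin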
+ """ + bytes_input = make_input_stream(input, self.charset) + echo_input = None + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = formatting.FORCED_WIDTH + formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + bytes_output = io.BytesIO() + + if self.echo_stdin: + bytes_input = echo_input = t.cast( + t.BinaryIO, EchoingStdin(bytes_input, bytes_output) + ) + + sys.stdin = text_input = _NamedTextIOWrapper( + bytes_input, encoding=self.charset, name="", mode="r" + ) + + if self.echo_stdin: + # Force unbuffered reads, otherwise TextIOWrapper reads a + # large chunk which is echoed early. + text_input._CHUNK_SIZE = 1 # type: ignore + + sys.stdout = _NamedTextIOWrapper( + bytes_output, encoding=self.charset, name="", mode="w" + ) + + bytes_error = None + if self.mix_stderr: + sys.stderr = sys.stdout + else: + bytes_error = io.BytesIO() + sys.stderr = _NamedTextIOWrapper( + bytes_error, + encoding=self.charset, + name="", + mode="w", + errors="backslashreplace", + ) + + @_pause_echo(echo_input) # type: ignore + def visible_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(prompt or "") + val = text_input.readline().rstrip("\r\n") + sys.stdout.write(f"{val}\n") + sys.stdout.flush() + return val + + @_pause_echo(echo_input) # type: ignore + def hidden_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(f"{prompt or ''}\n") + sys.stdout.flush() + return text_input.readline().rstrip("\r\n") + + @_pause_echo(echo_input) # type: ignore + def _getchar(echo: bool) -> str: + char = sys.stdin.read(1) + + if echo: + sys.stdout.write(char) + + sys.stdout.flush() + return char + + default_color = color + + def should_strip_ansi( + stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None + ) -> bool: + if color is None: + return not default_color + return not color + + old_visible_prompt_func = termui.visible_prompt_func + old_hidden_prompt_func = termui.hidden_prompt_func + old__getchar_func = termui._getchar + old_should_strip_ansi = utils.should_strip_ansi # type: ignore + termui.visible_prompt_func = visible_input + termui.hidden_prompt_func = hidden_input + termui._getchar = _getchar + utils.should_strip_ansi = should_strip_ansi # type: ignore + + old_env = {} + try: + for key, value in env.items(): + old_env[key] = os.environ.get(key) + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + yield (bytes_output, bytes_error) + finally: + for key, value in old_env.items(): + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + sys.stdout = old_stdout + sys.stderr = old_stderr + sys.stdin = old_stdin + termui.visible_prompt_func = old_visible_prompt_func + termui.hidden_prompt_func = old_hidden_prompt_func + termui._getchar = old__getchar_func + utils.should_strip_ansi = old_should_strip_ansi # type: ignore + formatting.FORCED_WIDTH = old_forced_width + + def invoke( + self, + cli: "BaseCommand", + args: t.Optional[t.Union[str, t.Sequence[str]]] = None, + input: t.Optional[t.Union[str, bytes, t.IO]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + catch_exceptions: bool = True, + color: bool = False, + **extra: t.Any, + ) -> Result: + """Invokes a command in an isolated environment. The arguments are + forwarded directly to the command line script, the `extra` keyword + arguments are passed to the :meth:`~clickpkg.Command.main` function of + the command. 
+ + This returns a :class:`Result` object. + + :param cli: the command to invoke + :param args: the arguments to invoke. It may be given as an iterable + or a string. When given as string it will be interpreted + as a Unix shell command. More details at + :func:`shlex.split`. + :param input: the input data for `sys.stdin`. + :param env: the environment overrides. + :param catch_exceptions: Whether to catch any other exceptions than + ``SystemExit``. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + The result object has the ``return_value`` attribute with + the value returned from the invoked command. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionchanged:: 3.0 + Added the ``catch_exceptions`` parameter. + + .. versionchanged:: 3.0 + The result object has the ``exc_info`` attribute with the + traceback if available. + """ + exc_info = None + with self.isolation(input=input, env=env, color=color) as outstreams: + return_value = None + exception: t.Optional[BaseException] = None + exit_code = 0 + + if isinstance(args, str): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + return_value = cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) + + if e_code is None: + e_code = 0 + + if e_code != 0: + exception = e + + if not isinstance(e_code, int): + sys.stdout.write(str(e_code)) + sys.stdout.write("\n") + e_code = 1 + + exit_code = e_code + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + stdout = outstreams[0].getvalue() + if self.mix_stderr: + stderr = None + else: + stderr = outstreams[1].getvalue() # type: ignore + + return Result( + runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + return_value=return_value, + exit_code=exit_code, + exception=exception, + exc_info=exc_info, # type: ignore + ) + + @contextlib.contextmanager + def isolated_filesystem( + self, temp_dir: t.Optional[t.Union[str, os.PathLike]] = None + ) -> t.Iterator[str]: + """A context manager that creates a temporary directory and + changes the current working directory to it. This isolates tests + that affect the contents of the CWD to prevent them from + interfering with each other. + + :param temp_dir: Create the temporary directory under this + directory. If given, the created directory is not removed + when exiting. + + .. versionchanged:: 8.0 + Added the ``temp_dir`` parameter. 
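+
+        A hedged usage sketch::
+
+            runner = CliRunner()
+
+            with runner.isolated_filesystem():
+                with open("hello.txt", "w") as f:
+                    f.write("Hello World!")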
+ """ + cwd = os.getcwd() + dt = tempfile.mkdtemp(dir=temp_dir) # type: ignore[type-var] + os.chdir(dt) + + try: + yield t.cast(str, dt) + finally: + os.chdir(cwd) + + if temp_dir is None: + try: + shutil.rmtree(dt) + except OSError: # noqa: B014 + pass diff --git a/venv/lib/python3.10/site-packages/click/types.py b/venv/lib/python3.10/site-packages/click/types.py new file mode 100644 index 0000000..b45ee53 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/types.py @@ -0,0 +1,1073 @@ +import os +import stat +import typing as t +from datetime import datetime +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import _get_argv_encoding +from ._compat import get_filesystem_encoding +from ._compat import open_stream +from .exceptions import BadParameter +from .utils import LazyFile +from .utils import safecall + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + from .core import Parameter + from .shell_completion import CompletionItem + + +class ParamType: + """Represents the type of a parameter. Validates and converts values + from the command line or Python into the correct type. + + To implement a custom type, subclass and implement at least the + following: + + - The :attr:`name` class attribute must be set. + - Calling an instance of the type with ``None`` must return + ``None``. This is already implemented by default. + - :meth:`convert` must convert string values to the correct type. + - :meth:`convert` must accept values that are already the correct + type. + - It must be able to convert a value if the ``ctx`` and ``param`` + arguments are ``None``. This can occur when converting prompt + input. + """ + + is_composite: t.ClassVar[bool] = False + arity: t.ClassVar[int] = 1 + + #: the descriptive name of this type + name: str + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). + envvar_list_splitter: t.ClassVar[t.Optional[str]] = None + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + # The class name without the "ParamType" suffix. + param_type = type(self).__name__.partition("ParamType")[0] + param_type = param_type.partition("ParameterType")[0] + + # Custom subclasses might not remember to set a name. + if hasattr(self, "name"): + name = self.name + else: + name = param_type + + return {"param_type": param_type, "name": name} + + def __call__( + self, + value: t.Any, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> t.Any: + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param: "Parameter") -> t.Optional[str]: + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param: "Parameter") -> t.Optional[str]: + """Optionally might return extra information about a missing + parameter. + + .. versionadded:: 2.0 + """ + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + """Convert the value to the correct type. 
This is not called if + the value is ``None`` (the missing value). + + This must accept string values from the command line, as well as + values that are already the correct type. It may also convert + other compatible types. + + The ``param`` and ``ctx`` arguments may be ``None`` in certain + situations, such as when converting prompt input. + + If the value cannot be converted, call :meth:`fail` with a + descriptive message. + + :param value: The value to convert. + :param param: The parameter that is using this type to convert + its value. May be ``None``. + :param ctx: The current context that arrived at this value. May + be ``None``. + """ + return value + + def split_envvar_value(self, rv: str) -> t.Sequence[str]: + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or "").split(self.envvar_list_splitter) + + def fail( + self, + message: str, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> "t.NoReturn": + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a list of + :class:`~click.shell_completion.CompletionItem` objects for the + incomplete value. Most types do not provide completions, but + some do, and this allows custom types to provide custom + completions as well. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + return [] + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self) -> int: # type: ignore + raise NotImplementedError() + + +class FuncParamType(ParamType): + def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: + self.name = func.__name__ + self.func = func + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["func"] = self.func + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self.func(value) + except ValueError: + try: + value = str(value) + except UnicodeError: + value = value.decode("utf-8", "replace") + + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + return value + + def __repr__(self) -> str: + return "UNPROCESSED" + + +class StringParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = get_filesystem_encoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode("utf-8", "replace") + else: + value = value.decode("utf-8", "replace") + return value + return str(value) + + def __repr__(self) -> str: + return "STRING" + + +class Choice(ParamType): + """The choice type allows a value to be checked against a fixed set + of supported values. All of these values have to be strings. + + You should only pass a list or tuple of choices. Other iterables + (like generators) may lead to surprising results. + + The resulting value will always be one of the originally passed choices + regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` + being specified. + + See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. + """ + + name = "choice" + + def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: + self.choices = choices + self.case_sensitive = case_sensitive + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["choices"] = self.choices + info_dict["case_sensitive"] = self.case_sensitive + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + choices_str = "|".join(self.choices) + + # Use curly braces to indicate a required argument. + if param.required and param.param_type_name == "argument": + return f"{{{choices_str}}}" + + # Use square braces to indicate an option or optional argument. 
+ return f"[{choices_str}]" + + def get_missing_message(self, param: "Parameter") -> str: + return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = {choice: choice for choice in self.choices} + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(value) + normed_choices = { + ctx.token_normalize_func(normed_choice): original + for normed_choice, original in normed_choices.items() + } + + if not self.case_sensitive: + normed_value = normed_value.casefold() + normed_choices = { + normed_choice.casefold(): original + for normed_choice, original in normed_choices.items() + } + + if normed_value in normed_choices: + return normed_choices[normed_value] + + choices_str = ", ".join(map(repr, self.choices)) + self.fail( + ngettext( + "{value!r} is not {choice}.", + "{value!r} is not one of {choices}.", + len(self.choices), + ).format(value=value, choice=choices_str, choices=choices_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return f"Choice({list(self.choices)})" + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Complete choices that start with the incomplete value. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + str_choices = map(str, self.choices) + + if self.case_sensitive: + matched = (c for c in str_choices if c.startswith(incomplete)) + else: + incomplete = incomplete.lower() + matched = (c for c in str_choices if c.lower().startswith(incomplete)) + + return [CompletionItem(c) for c in matched] + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. 
+ """ + + name = "datetime" + + def __init__(self, formats: t.Optional[t.Sequence[str]] = None): + self.formats = formats or ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["formats"] = self.formats + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + return f"[{'|'.join(self.formats)}]" + + def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, datetime): + return value + + for format in self.formats: + converted = self._try_to_convert_date(value, format) + + if converted is not None: + return converted + + formats_str = ", ".join(map(repr, self.formats)) + self.fail( + ngettext( + "{value!r} does not match the format {format}.", + "{value!r} does not match the formats {formats}.", + len(self.formats), + ).format(value=value, format=formats_str, formats=formats_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return "DateTime" + + +class _NumberParamTypeBase(ParamType): + _number_class: t.ClassVar[t.Type] + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self._number_class(value) + except ValueError: + self.fail( + _("{value!r} is not a valid {number_type}.").format( + value=value, number_type=self.name + ), + param, + ctx, + ) + + +class _NumberRangeBase(_NumberParamTypeBase): + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + self.min = min + self.max = max + self.min_open = min_open + self.max_open = max_open + self.clamp = clamp + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + min=self.min, + max=self.max, + min_open=self.min_open, + max_open=self.max_open, + clamp=self.clamp, + ) + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import operator + + rv = super().convert(value, param, ctx) + lt_min: bool = self.min is not None and ( + operator.le if self.min_open else operator.lt + )(rv, self.min) + gt_max: bool = self.max is not None and ( + operator.ge if self.max_open else operator.gt + )(rv, self.max) + + if self.clamp: + if lt_min: + return self._clamp(self.min, 1, self.min_open) # type: ignore + + if gt_max: + return self._clamp(self.max, -1, self.max_open) # type: ignore + + if lt_min or gt_max: + self.fail( + _("{value} is not in the range {range}.").format( + value=rv, range=self._describe_range() + ), + param, + ctx, + ) + + return rv + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + """Find the valid value to clamp to bound in the given + direction. + + :param bound: The boundary value. + :param dir: 1 or -1 indicating the direction to move. + :param open: If true, the range does not include the bound. 
+ """ + raise NotImplementedError + + def _describe_range(self) -> str: + """Describe the range for use in help text.""" + if self.min is None: + op = "<" if self.max_open else "<=" + return f"x{op}{self.max}" + + if self.max is None: + op = ">" if self.min_open else ">=" + return f"x{op}{self.min}" + + lop = "<" if self.min_open else "<=" + rop = "<" if self.max_open else "<=" + return f"{self.min}{lop}x{rop}{self.max}" + + def __repr__(self) -> str: + clamp = " clamped" if self.clamp else "" + return f"<{type(self).__name__} {self._describe_range()}{clamp}>" + + +class IntParamType(_NumberParamTypeBase): + name = "integer" + _number_class = int + + def __repr__(self) -> str: + return "INT" + + +class IntRange(_NumberRangeBase, IntParamType): + """Restrict an :data:`click.INT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "integer range" + + def _clamp( # type: ignore + self, bound: int, dir: "te.Literal[1, -1]", open: bool + ) -> int: + if not open: + return bound + + return bound + dir + + +class FloatParamType(_NumberParamTypeBase): + name = "float" + _number_class = float + + def __repr__(self) -> str: + return "FLOAT" + + +class FloatRange(_NumberRangeBase, FloatParamType): + """Restrict a :data:`click.FLOAT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. This is not supported if either + boundary is marked ``open``. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "float range" + + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + super().__init__( + min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp + ) + + if (min_open or max_open) and clamp: + raise TypeError("Clamping is not supported for open bounds.") + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + if not open: + return bound + + # Could use Python 3.9's math.nextafter here, but clamping an + # open float range doesn't seem to be particularly useful. It's + # left up to the user to write a callback to do it if needed. 
+ raise RuntimeError("Clamping is not supported for open bounds.") + + +class BoolParamType(ParamType): + name = "boolean" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if value in {False, True}: + return bool(value) + + norm = value.strip().lower() + + if norm in {"1", "true", "t", "yes", "y", "on"}: + return True + + if norm in {"0", "false", "f", "no", "n", "off"}: + return False + + self.fail( + _("{value!r} is not a valid boolean.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "BOOL" + + +class UUIDParameterType(ParamType): + name = "uuid" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import uuid + + if isinstance(value, uuid.UUID): + return value + + value = value.strip() + + try: + return uuid.UUID(value) + except ValueError: + self.fail( + _("{value!r} is not a valid UUID.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "UUID" + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. The special value ``-`` + indicates stdin or stdout depending on the mode. + + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Starting with Click 2.0, files can also be opened atomically in which + case all writes go into a separate file in the same folder and upon + completion the file will be moved over to the original location. This + is useful if a file regularly read by other users is modified. + + See :ref:`file-args` for more information. 
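+
+    Example (a hedged sketch)::
+
+        @click.command()
+        @click.argument("src", type=click.File("r"))
+        def cat(src):
+            click.echo(src.read())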
+ """ + + name = "filename" + envvar_list_splitter = os.path.pathsep + + def __init__( + self, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: t.Optional[bool] = None, + atomic: bool = False, + ) -> None: + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update(mode=self.mode, encoding=self.encoding) + return info_dict + + def resolve_lazy_flag(self, value: t.Any) -> bool: + if self.lazy is not None: + return self.lazy + if value == "-": + return False + elif "w" in self.mode: + return True + return False + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + if hasattr(value, "read") or hasattr(value, "write"): + return value + + lazy = self.resolve_lazy_flag(value) + + if lazy: + f: t.IO = t.cast( + t.IO, + LazyFile( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ), + ) + + if ctx is not None: + ctx.call_on_close(f.close_intelligently) # type: ignore + + return f + + f, should_close = open_stream( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + + return f + except OSError as e: # noqa: B014 + self.fail(f"'{os.fsdecode(value)}': {e.strerror}", param, ctx) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide file path completions. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + return [CompletionItem(incomplete, type="file")] + + +class Path(ParamType): + """The ``Path`` type is similar to the :class:`File` type, but + returns the filename instead of an open file. Various checks can be + enabled to validate the type of file and permissions. + + :param exists: The file or directory needs to exist for the value to + be valid. If this is not set to ``True``, and the file does not + exist, then all further checks are silently skipped. + :param file_okay: Allow a file as a value. + :param dir_okay: Allow a directory as a value. + :param readable: if true, a readable check is performed. + :param writable: if true, a writable check is performed. + :param executable: if true, an executable check is performed. + :param resolve_path: Make the value absolute and resolve any + symlinks. A ``~`` is not expanded, as this is supposed to be + done by the shell only. + :param allow_dash: Allow a single dash as a value, which indicates + a standard stream (but does not open it). Use + :func:`~click.open_file` to handle opening this value. + :param path_type: Convert the incoming path value to this type. If + ``None``, keep Python's default, which is ``str``. Useful to + convert to :class:`pathlib.Path`. + + .. 
versionchanged:: 8.1 + Added the ``executable`` parameter. + + .. versionchanged:: 8.0 + Allow passing ``type=pathlib.Path``. + + .. versionchanged:: 6.0 + Added the ``allow_dash`` parameter. + """ + + envvar_list_splitter = os.path.pathsep + + def __init__( + self, + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: t.Optional[t.Type] = None, + executable: bool = False, + ): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.readable = readable + self.writable = writable + self.executable = executable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name = _("file") + elif self.dir_okay and not self.file_okay: + self.name = _("directory") + else: + self.name = _("path") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + exists=self.exists, + file_okay=self.file_okay, + dir_okay=self.dir_okay, + writable=self.writable, + readable=self.readable, + allow_dash=self.allow_dash, + ) + return info_dict + + def coerce_path_result(self, rv: t.Any) -> t.Any: + if self.type is not None and not isinstance(rv, self.type): + if self.type is str: + rv = os.fsdecode(rv) + elif self.type is bytes: + rv = os.fsencode(rv) + else: + rv = self.type(rv) + + return rv + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") + + if not is_dash: + if self.resolve_path: + # os.path.realpath doesn't resolve symlinks on Windows + # until Python 3.8. Use pathlib for now. + import pathlib + + rv = os.fsdecode(pathlib.Path(rv).resolve()) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail( + _("{name} {filename!r} does not exist.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail( + _("{name} {filename!r} is a file.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail( + _("{name} '{filename}' is a directory.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + if self.readable and not os.access(rv, os.R_OK): + self.fail( + _("{name} {filename!r} is not readable.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + if self.writable and not os.access(rv, os.W_OK): + self.fail( + _("{name} {filename!r} is not writable.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + if self.executable and not os.access(value, os.X_OK): + self.fail( + _("{name} {filename!r} is not executable.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + return self.coerce_path_result(rv) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide path completions for only + directories or any paths. + + :param ctx: Invocation context for this command. 
+ :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + type = "dir" if self.dir_okay and not self.file_okay else "file" + return [CompletionItem(incomplete, type=type)] + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. + """ + + def __init__(self, types: t.Sequence[t.Union[t.Type, ParamType]]) -> None: + self.types = [convert_type(ty) for ty in types] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["types"] = [t.to_info_dict() for t in self.types] + return info_dict + + @property + def name(self) -> str: # type: ignore + return f"<{' '.join(ty.name for ty in self.types)}>" + + @property + def arity(self) -> int: # type: ignore + return len(self.types) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + len_type = len(self.types) + len_value = len(value) + + if len_value != len_type: + self.fail( + ngettext( + "{len_type} values are required, but {len_value} was given.", + "{len_type} values are required, but {len_value} were given.", + len_value, + ).format(len_type=len_type, len_value=len_value), + param=param, + ctx=ctx, + ) + + return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) + + +def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: + """Find the most appropriate :class:`ParamType` for the given Python + type. If the type isn't provided, it can be inferred from a default + value. + """ + guessed_type = False + + if ty is None and default is not None: + if isinstance(default, (tuple, list)): + # If the default is empty, ty will remain None and will + # return STRING. + if default: + item = default[0] + + # A tuple of tuples needs to detect the inner types. + # Can't call convert recursively because that would + # incorrectly unwind the tuple to a single type. + if isinstance(item, (tuple, list)): + ty = tuple(map(type, item)) + else: + ty = type(item) + else: + ty = type(default) + + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + + if isinstance(ty, ParamType): + return ty + + if ty is str or ty is None: + return STRING + + if ty is int: + return INT + + if ty is float: + return FLOAT + + if ty is bool: + return BOOL + + if guessed_type: + return STRING + + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError( + f"Attempted to use an uninstantiated parameter type ({ty})." + ) + except TypeError: + # ty is an instance (correct), so issubclass fails. + pass + + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but +#: internally no string conversion takes place if the input was bytes. +#: This is usually useful when working with file paths as they can +#: appear in bytes and unicode. 
+#: +#: For path related uses the :class:`Path` type is a better choice but +#: there are situations where an unprocessed type is useful which is why +#: it is is provided. +#: +#: .. versionadded:: 4.0 +UNPROCESSED = UnprocessedParamType() + +#: A unicode string parameter type which is the implicit default. This +#: can also be selected by using ``str`` as type. +STRING = StringParamType() + +#: An integer parameter. This can also be selected by using ``int`` as +#: type. +INT = IntParamType() + +#: A floating point value parameter. This can also be selected by using +#: ``float`` as type. +FLOAT = FloatParamType() + +#: A boolean parameter. This is the default for boolean flags. This can +#: also be selected by using ``bool`` as a type. +BOOL = BoolParamType() + +#: A UUID parameter. +UUID = UUIDParameterType() diff --git a/venv/lib/python3.10/site-packages/click/utils.py b/venv/lib/python3.10/site-packages/click/utils.py new file mode 100644 index 0000000..8283788 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click/utils.py @@ -0,0 +1,580 @@ +import os +import re +import sys +import typing as t +from functools import update_wrapper +from types import ModuleType + +from ._compat import _default_text_stderr +from ._compat import _default_text_stdout +from ._compat import _find_binary_writer +from ._compat import auto_wrap_for_ansi +from ._compat import binary_streams +from ._compat import get_filesystem_encoding +from ._compat import open_stream +from ._compat import should_strip_ansi +from ._compat import strip_ansi +from ._compat import text_streams +from ._compat import WIN +from .globals import resolve_color_default + +if t.TYPE_CHECKING: + import typing_extensions as te + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + + +def _posixify(name: str) -> str: + return "-".join(name.split()).lower() + + +def safecall(func: F) -> F: + """Wraps a function so that it swallows exceptions.""" + + def wrapper(*args, **kwargs): # type: ignore + try: + return func(*args, **kwargs) + except Exception: + pass + + return update_wrapper(t.cast(F, wrapper), func) + + +def make_str(value: t.Any) -> str: + """Converts a value into a valid string.""" + if isinstance(value, bytes): + try: + return value.decode(get_filesystem_encoding()) + except UnicodeError: + return value.decode("utf-8", "replace") + return str(value) + + +def make_default_short_help(help: str, max_length: int = 45) -> str: + """Returns a condensed version of help string.""" + # Consider only the first paragraph. + paragraph_end = help.find("\n\n") + + if paragraph_end != -1: + help = help[:paragraph_end] + + # Collapse newlines, tabs, and spaces. + words = help.split() + + if not words: + return "" + + # The first paragraph started with a "no rewrap" marker, ignore it. + if words[0] == "\b": + words = words[1:] + + total_length = 0 + last_index = len(words) - 1 + + for i, word in enumerate(words): + total_length += len(word) + (i > 0) + + if total_length > max_length: # too long, truncate + break + + if word[-1] == ".": # sentence end, truncate without "..." + return " ".join(words[: i + 1]) + + if total_length == max_length and i != last_index: + break # not at sentence end, truncate with "..." + else: + return " ".join(words) # no truncation needed + + # Account for the length of the suffix. + total_length += len("...") + + # remove words until the length is short enough + while i > 0: + total_length -= len(words[i]) + (i > 0) + + if total_length <= max_length: + break + + i -= 1 + + return " ".join(words[:i]) + "..." 
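+
+# Illustrative behavior of make_default_short_help (hedged, derived
+# from the logic above): truncation prefers a sentence boundary, so
+#     make_default_short_help("Syncs files. Supports many backends.")
+# yields "Syncs files." rather than a "..."-suffixed fragment.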
+ + +class LazyFile: + """A lazy file works like a regular file but it does not fully open + the file but it does perform some basic checks early to see if the + filename parameter does make sense. This is useful for safely opening + files for writing. + """ + + def __init__( + self, + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, + ): + self.name = filename + self.mode = mode + self.encoding = encoding + self.errors = errors + self.atomic = atomic + self._f: t.Optional[t.IO] + + if filename == "-": + self._f, self.should_close = open_stream(filename, mode, encoding, errors) + else: + if "r" in mode: + # Open and close the file in case we're opening it for + # reading so that we can catch at least some errors in + # some cases early. + open(filename, mode).close() + self._f = None + self.should_close = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self.open(), name) + + def __repr__(self) -> str: + if self._f is not None: + return repr(self._f) + return f"" + + def open(self) -> t.IO: + """Opens the file if it's not yet open. This call might fail with + a :exc:`FileError`. Not handling this error will produce an error + that Click shows. + """ + if self._f is not None: + return self._f + try: + rv, self.should_close = open_stream( + self.name, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + except OSError as e: # noqa: E402 + from .exceptions import FileError + + raise FileError(self.name, hint=e.strerror) from e + self._f = rv + return rv + + def close(self) -> None: + """Closes the underlying file, no matter what.""" + if self._f is not None: + self._f.close() + + def close_intelligently(self) -> None: + """This function only closes the file if it was opened by the lazy + file wrapper. For instance this will never close stdin. + """ + if self.should_close: + self.close() + + def __enter__(self) -> "LazyFile": + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self.close_intelligently() + + def __iter__(self) -> t.Iterator[t.AnyStr]: + self.open() + return iter(self._f) # type: ignore + + +class KeepOpenFile: + def __init__(self, file: t.IO) -> None: + self._file = file + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._file, name) + + def __enter__(self) -> "KeepOpenFile": + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + pass + + def __repr__(self) -> str: + return repr(self._file) + + def __iter__(self) -> t.Iterator[t.AnyStr]: + return iter(self._file) + + +def echo( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.Any]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, +) -> None: + """Print a message and newline to stdout or a file. This should be + used instead of :func:`print` because it provides better support + for different data, files, and environments. + + Compared to :func:`print`, this does the following: + + - Ensures that the output encoding is not misconfigured on Linux. + - Supports Unicode in the Windows console. + - Supports writing to binary outputs, and supports writing bytes + to text outputs. + - Supports colors and styles on Windows. + - Removes ANSI color and style codes if the output does not look + like an interactive terminal. + - Always flushes the output. + + :param message: The string or bytes to output. Other objects are + converted to strings. + :param file: The file to write to. Defaults to ``stdout``. 
+ :param err: Write to ``stderr`` instead of ``stdout``. + :param nl: Print a newline after the message. Enabled by default. + :param color: Force showing or hiding colors and other styles. By + default Click will remove color if the output does not look like + an interactive terminal. + + .. versionchanged:: 6.0 + Support Unicode output on the Windows console. Click does not + modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` + will still not support Unicode. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionadded:: 3.0 + Added the ``err`` parameter. + + .. versionchanged:: 2.0 + Support colors on Windows if colorama is installed. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # Convert non bytes/text into the native string type. + if message is not None and not isinstance(message, (str, bytes, bytearray)): + out: t.Optional[t.Union[str, bytes]] = str(message) + else: + out = message + + if nl: + out = out or "" + if isinstance(out, str): + out += "\n" + else: + out += b"\n" + + if not out: + file.flush() + return + + # If there is a message and the value looks like bytes, we manually + # need to find the binary stream and write the message in there. + # This is done separately so that most stream types will work as you + # would expect. Eg: you can write to StringIO for other cases. + if isinstance(out, (bytes, bytearray)): + binary_file = _find_binary_writer(file) + + if binary_file is not None: + file.flush() + binary_file.write(out) + binary_file.flush() + return + + # ANSI style code support. For no message or bytes, nothing happens. + # When outputting to a file instead of a terminal, strip codes. + else: + color = resolve_color_default(color) + + if should_strip_ansi(file, color): + out = strip_ansi(out) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file) # type: ignore + elif not color: + out = strip_ansi(out) + + file.write(out) # type: ignore + file.flush() + + +def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: + """Returns a system stream for byte processing. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener() + + +def get_text_stream( + name: "te.Literal['stdin', 'stdout', 'stderr']", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", +) -> t.TextIO: + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts for already + correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. + """ + opener = text_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener(encoding, errors) + + +def open_file( + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: bool = False, + atomic: bool = False, +) -> t.IO: + """Open a file, with extra behavior to handle ``'-'`` to indicate + a standard stream, lazy open on write, and atomic write. 
Similar to the behavior of the :class:`~click.File` param type.
+
+    If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is
+    wrapped so that using it in a context manager will not close it.
+    This makes it possible to use the function without accidentally
+    closing a standard stream:
+
+    .. code-block:: python
+
+        with open_file(filename) as f:
+            ...
+
+    :param filename: The name of the file to open, or ``'-'`` for
+        ``stdin``/``stdout``.
+    :param mode: The mode in which to open the file.
+    :param encoding: The encoding to decode or encode a file opened in
+        text mode.
+    :param errors: The error handling mode.
+    :param lazy: Wait to open the file until it is accessed. For read
+        mode, the file is temporarily opened to raise access errors
+        early, then closed until it is read again.
+    :param atomic: Write to a temporary file and replace the given file
+        on close.
+
+    .. versionadded:: 3.0
+    """
+    if lazy:
+        return t.cast(t.IO, LazyFile(filename, mode, encoding, errors, atomic=atomic))
+
+    f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic)
+
+    if not should_close:
+        f = t.cast(t.IO, KeepOpenFile(f))
+
+    return f
+
+
+def format_filename(
+    filename: t.Union[str, bytes, os.PathLike], shorten: bool = False
+) -> str:
+    """Formats a filename for user display. The main purpose of this
+    function is to ensure that the filename can be displayed at all. This
+    will decode the filename to unicode if necessary in a way that it will
+    not fail. Optionally, it can shorten the filename to not include the
+    full path to the filename.
+
+    :param filename: formats a filename for UI display. This will also convert
+        the filename into unicode without failing.
+    :param shorten: this optionally shortens the filename to strip off the
+        path that leads up to it.
+    """
+    if shorten:
+        filename = os.path.basename(filename)
+
+    return os.fsdecode(filename)
+
+
+def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str:
+    r"""Returns the config folder for the application. The default behavior
+    is to return whatever is most appropriate for the operating system.
+
+    To give you an idea, for an app called ``"Foo Bar"``, something like
+    the following folders could be returned:
+
+    Mac OS X:
+      ``~/Library/Application Support/Foo Bar``
+    Mac OS X (POSIX):
+      ``~/.foo-bar``
+    Unix:
+      ``~/.config/foo-bar``
+    Unix (POSIX):
+      ``~/.foo-bar``
+    Windows (roaming):
+      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+    Windows (not roaming):
+      ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+    .. versionadded:: 2.0
+
+    :param app_name: the application name. This should be properly capitalized
+        and can contain whitespace.
+    :param roaming: controls if the folder should be roaming or not on Windows.
+        Has no effect otherwise.
+    :param force_posix: if this is set to `True` then on any POSIX system the
+        folder will be stored in the home folder with a leading
+        dot instead of the XDG config home or darwin's
+        application support folder.
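+
+    For example, ``get_app_dir("Foo Bar")`` on a typical Linux system
+    returns the expanded form of ``~/.config/foo-bar`` (illustrative; the
+    exact result depends on ``XDG_CONFIG_HOME``).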
+ """ + if WIN: + key = "APPDATA" if roaming else "LOCALAPPDATA" + folder = os.environ.get(key) + if folder is None: + folder = os.path.expanduser("~") + return os.path.join(folder, app_name) + if force_posix: + return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) + if sys.platform == "darwin": + return os.path.join( + os.path.expanduser("~/Library/Application Support"), app_name + ) + return os.path.join( + os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), + _posixify(app_name), + ) + + +class PacifyFlushWrapper: + """This wrapper is used to catch and suppress BrokenPipeErrors resulting + from ``.flush()`` being called on broken pipe during the shutdown/final-GC + of the Python interpreter. Notably ``.flush()`` is always called on + ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any + other cleanup code, and the case where the underlying file is not a broken + pipe, all calls and attributes are proxied. + """ + + def __init__(self, wrapped: t.IO) -> None: + self.wrapped = wrapped + + def flush(self) -> None: + try: + self.wrapped.flush() + except OSError as e: + import errno + + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr: str) -> t.Any: + return getattr(self.wrapped, attr) + + +def _detect_program_name( + path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None +) -> str: + """Determine the command used to run the program, for use in help + text. If a file or entry point was executed, the file name is + returned. If ``python -m`` was used to execute a module or package, + ``python -m name`` is returned. + + This doesn't try to be too precise, the goal is to give a concise + name for help text. Files are only shown as their name without the + path. ``python`` is only shown for modules, and the full path to + ``sys.executable`` is not shown. + + :param path: The Python file being executed. Python puts this in + ``sys.argv[0]``, which is used by default. + :param _main: The ``__main__`` module. This should only be passed + during internal testing. + + .. versionadded:: 8.0 + Based on command args detection in the Werkzeug reloader. + + :meta private: + """ + if _main is None: + _main = sys.modules["__main__"] + + if not path: + path = sys.argv[0] + + # The value of __package__ indicates how Python was called. It may + # not exist if a setuptools script is installed as an egg. It may be + # set incorrectly for entry points created with pip on Windows. + if getattr(_main, "__package__", None) is None or ( + os.name == "nt" + and _main.__package__ == "" + and not os.path.exists(path) + and os.path.exists(f"{path}.exe") + ): + # Executed a file, like "python app.py". + return os.path.basename(path) + + # Executed a module, like "python -m example". + # Rewritten by Python from "-m script" to "/path/to/script.py". + # Need to look at main module to determine how it was executed. + py_module = t.cast(str, _main.__package__) + name = os.path.splitext(os.path.basename(path))[0] + + # A submodule like "example.cli". + if name != "__main__": + py_module = f"{py_module}.{name}" + + return f"python -m {py_module.lstrip('.')}" + + +def _expand_args( + args: t.Iterable[str], + *, + user: bool = True, + env: bool = True, + glob_recursive: bool = True, +) -> t.List[str]: + """Simulate Unix shell expansion with Python functions. + + See :func:`glob.glob`, :func:`os.path.expanduser`, and + :func:`os.path.expandvars`. + + This is intended for use on Windows, where the shell does not do any + expansion. 
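For example, ``_expand_args(["~/notes/*.txt"])`` might return the matching paths, or the expanded pattern itself when nothing matches (illustrative; the result depends on the filesystem).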
It may not exactly match what a Unix shell would do.
+
+    :param args: List of command line arguments to expand.
+    :param user: Expand user home directory.
+    :param env: Expand environment variables.
+    :param glob_recursive: ``**`` matches directories recursively.
+
+    .. versionchanged:: 8.1
+        Invalid glob patterns are treated as empty expansions rather
+        than raising an error.
+
+    .. versionadded:: 8.0
+
+    :meta private:
+    """
+    from glob import glob
+
+    out = []
+
+    for arg in args:
+        if user:
+            arg = os.path.expanduser(arg)
+
+        if env:
+            arg = os.path.expandvars(arg)
+
+        try:
+            matches = glob(arg, recursive=glob_recursive)
+        except re.error:
+            matches = []
+
+        if not matches:
+            out.append(arg)
+        else:
+            out.extend(matches)
+
+    return out
diff --git a/venv/lib/python3.10/site-packages/deprecated/__init__.py b/venv/lib/python3.10/site-packages/deprecated/__init__.py
new file mode 100644
index 0000000..32e992a
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/deprecated/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+"""
+Deprecated Library
+==================
+
+Python ``@deprecated`` decorator to deprecate old python classes, functions or methods.
+
+"""
+
+__version__ = "1.2.13"
+__author__ = u"Laurent LAPORTE <tantale.solutions@gmail.com>"
+__date__ = "2021-09-05"
+__credits__ = "(c) Laurent LAPORTE"
+
+from deprecated.classic import deprecated
diff --git a/venv/lib/python3.10/site-packages/deprecated/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/deprecated/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..e86636f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/deprecated/__pycache__/__init__.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/deprecated/__pycache__/classic.cpython-310.pyc b/venv/lib/python3.10/site-packages/deprecated/__pycache__/classic.cpython-310.pyc
new file mode 100644
index 0000000..d2897f2
Binary files /dev/null and b/venv/lib/python3.10/site-packages/deprecated/__pycache__/classic.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/deprecated/__pycache__/sphinx.cpython-310.pyc b/venv/lib/python3.10/site-packages/deprecated/__pycache__/sphinx.cpython-310.pyc
new file mode 100644
index 0000000..fd08526
Binary files /dev/null and b/venv/lib/python3.10/site-packages/deprecated/__pycache__/sphinx.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/deprecated/classic.py b/venv/lib/python3.10/site-packages/deprecated/classic.py
new file mode 100644
index 0000000..6ca3f27
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/deprecated/classic.py
@@ -0,0 +1,292 @@
+# -*- coding: utf-8 -*-
+"""
+Classic deprecation warning
+===========================
+
+Classic ``@deprecated`` decorator to deprecate old python classes, functions or methods.
+
+.. _The Warnings Filter: https://docs.python.org/3/library/warnings.html#the-warnings-filter
+"""
+import functools
+import inspect
+import platform
+import warnings
+
+import wrapt
+
+try:
+    # If the C extension for wrapt was compiled and wrapt/_wrappers.pyd exists, then the
+    # stack level that should be passed to warnings.warn should be 2. However, if using
+    # a pure python wrapt, an extra stacklevel is required.
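+    # In other words, stacklevel selects which caller frame the warning is
+    # attributed to: the pure-python wrapper adds one extra frame, so the
+    # warning must be reported one level further up the stack.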
+    import wrapt._wrappers
+
+    _routine_stacklevel = 2
+    _class_stacklevel = 2
+except ImportError:
+    _routine_stacklevel = 3
+    if platform.python_implementation() == "PyPy":
+        _class_stacklevel = 2
+    else:
+        _class_stacklevel = 3
+
+string_types = (type(b''), type(u''))
+
+
+class ClassicAdapter(wrapt.AdapterFactory):
+    """
+    Classic adapter -- *for advanced usage only*
+
+    This adapter is used to get the deprecation message according to the wrapped object type:
+    class, function, standard method, static method, or class method.
+
+    This is the base class of the :class:`~deprecated.sphinx.SphinxAdapter` class
+    which is used to update the wrapped object docstring.
+
+    You can also inherit this class to change the deprecation message.
+
+    In the following example, we change the message into "The ... is deprecated.":
+
+    .. code-block:: python
+
+        import inspect
+
+        from deprecated.classic import ClassicAdapter
+        from deprecated.classic import deprecated
+
+
+        class MyClassicAdapter(ClassicAdapter):
+            def get_deprecated_msg(self, wrapped, instance):
+                if instance is None:
+                    if inspect.isclass(wrapped):
+                        fmt = "The class {name} is deprecated."
+                    else:
+                        fmt = "The function {name} is deprecated."
+                else:
+                    if inspect.isclass(instance):
+                        fmt = "The class method {name} is deprecated."
+                    else:
+                        fmt = "The method {name} is deprecated."
+                if self.reason:
+                    fmt += " ({reason})"
+                if self.version:
+                    fmt += " -- Deprecated since version {version}."
+                return fmt.format(name=wrapped.__name__,
+                                  reason=self.reason or "",
+                                  version=self.version or "")
+
+    Then, you can use your ``MyClassicAdapter`` class like this in your source code:
+
+    .. code-block:: python
+
+        @deprecated(reason="use another function", adapter_cls=MyClassicAdapter)
+        def some_old_function(x, y):
+            return x + y
+    """
+
+    def __init__(self, reason="", version="", action=None, category=DeprecationWarning):
+        """
+        Construct a wrapper adapter.
+
+        :type reason: str
+        :param reason:
+            Reason message which documents the deprecation in your library (can be omitted).
+
+        :type version: str
+        :param version:
+            Version of your project which deprecates this feature.
+            If you follow the `Semantic Versioning <https://semver.org/>`_,
+            the version number has the format "MAJOR.MINOR.PATCH".
+
+        :type action: str
+        :param action:
+            A warning filter used to activate or not the deprecation warning.
+            Can be one of "error", "ignore", "always", "default", "module", or "once".
+            If ``None`` or empty, the global filtering mechanism is used.
+            See: `The Warnings Filter`_ in the Python documentation.
+
+        :type category: type
+        :param category:
+            The warning category to use for the deprecation warning.
+            By default, the category class is :class:`~DeprecationWarning`,
+            you can inherit this class to define your own deprecation warning category.
+        """
+        self.reason = reason or ""
+        self.version = version or ""
+        self.action = action
+        self.category = category
+        super(ClassicAdapter, self).__init__()
+
+    def get_deprecated_msg(self, wrapped, instance):
+        """
+        Get the deprecation warning message for the user.
+
+        :param wrapped: Wrapped class or function.
+
+        :param instance: The object to which the wrapped function was bound when it was called.
+
+        :return: The warning message.
+        """
+        if instance is None:
+            if inspect.isclass(wrapped):
+                fmt = "Call to deprecated class {name}."
+            else:
+                fmt = "Call to deprecated function (or staticmethod) {name}."
+        else:
+            if inspect.isclass(instance):
+                fmt = "Call to deprecated class method {name}."
+            else:
+                fmt = "Call to deprecated method {name}."
+        if self.reason:
+            fmt += " ({reason})"
+        if self.version:
+            fmt += " -- Deprecated since version {version}."
+        return fmt.format(name=wrapped.__name__, reason=self.reason or "", version=self.version or "")
+
+    def __call__(self, wrapped):
+        """
+        Decorate your class or function.
+
+        :param wrapped: Wrapped class or function.
+
+        :return: the decorated class or function.
+
+        .. versionchanged:: 1.2.4
+           Don't pass arguments to :meth:`object.__new__` (other than *cls*).
+
+        .. versionchanged:: 1.2.8
+           The warning filter is not set if the *action* parameter is ``None`` or empty.
+        """
+        if inspect.isclass(wrapped):
+            old_new1 = wrapped.__new__
+
+            def wrapped_cls(cls, *args, **kwargs):
+                msg = self.get_deprecated_msg(wrapped, None)
+                if self.action:
+                    with warnings.catch_warnings():
+                        warnings.simplefilter(self.action, self.category)
+                        warnings.warn(msg, category=self.category, stacklevel=_class_stacklevel)
+                else:
+                    warnings.warn(msg, category=self.category, stacklevel=_class_stacklevel)
+                if old_new1 is object.__new__:
+                    return old_new1(cls)
+                # actually, we don't know the real signature of *old_new1*
+                return old_new1(cls, *args, **kwargs)
+
+            wrapped.__new__ = staticmethod(wrapped_cls)
+
+        return wrapped
+
+
+def deprecated(*args, **kwargs):
+    """
+    This is a decorator which can be used to mark functions
+    as deprecated. It will result in a warning being emitted
+    when the function is used.
+
+    **Classic usage:**
+
+    To use this, decorate your deprecated function with the **@deprecated** decorator:
+
+    .. code-block:: python
+
+        from deprecated import deprecated
+
+
+        @deprecated
+        def some_old_function(x, y):
+            return x + y
+
+    You can also decorate a class or a method:
+
+    .. code-block:: python
+
+        from deprecated import deprecated
+
+
+        class SomeClass(object):
+            @deprecated
+            def some_old_method(self, x, y):
+                return x + y
+
+
+        @deprecated
+        class SomeOldClass(object):
+            pass
+
+    You can give a *reason* message to help the developer to choose another function/class,
+    and a *version* number to specify the starting version number of the deprecation.
+
+    .. code-block:: python
+
+        from deprecated import deprecated
+
+
+        @deprecated(reason="use another function", version='1.2.0')
+        def some_old_function(x, y):
+            return x + y
+
+    The *category* keyword argument allows you to specify the deprecation warning class of your choice.
+    By default, :exc:`DeprecationWarning` is used but you can choose :exc:`FutureWarning`,
+    :exc:`PendingDeprecationWarning` or a custom subclass.
+
+    .. code-block:: python
+
+        from deprecated import deprecated
+
+
+        @deprecated(category=PendingDeprecationWarning)
+        def some_old_function(x, y):
+            return x + y
+
+    The *action* keyword argument allows you to locally change the warning filtering.
+    *action* can be one of "error", "ignore", "always", "default", "module", or "once".
+    If ``None``, empty or missing, the global filtering mechanism is used.
+    See: `The Warnings Filter`_ in the Python documentation.
+
+    .. code-block:: python
+
+        from deprecated import deprecated
+
+
+        @deprecated(action="error")
+        def some_old_function(x, y):
+            return x + y
+
+    """
+    if args and isinstance(args[0], string_types):
+        kwargs['reason'] = args[0]
+        args = args[1:]
+
+    if args and not callable(args[0]):
+        raise TypeError(repr(type(args[0])))
+
+    if args:
+        action = kwargs.get('action')
+        category = kwargs.get('category', DeprecationWarning)
+        adapter_cls = kwargs.pop('adapter_cls', ClassicAdapter)
+        adapter = adapter_cls(**kwargs)
+
+        wrapped = args[0]
+        if inspect.isclass(wrapped):
+            wrapped = adapter(wrapped)
+            return wrapped
+
+        elif inspect.isroutine(wrapped):
+
+            @wrapt.decorator(adapter=adapter)
+            def wrapper_function(wrapped_, instance_, args_, kwargs_):
+                msg = adapter.get_deprecated_msg(wrapped_, instance_)
+                if action:
+                    with warnings.catch_warnings():
+                        warnings.simplefilter(action, category)
+                        warnings.warn(msg, category=category, stacklevel=_routine_stacklevel)
+                else:
+                    warnings.warn(msg, category=category, stacklevel=_routine_stacklevel)
+                return wrapped_(*args_, **kwargs_)
+
+            return wrapper_function(wrapped)
+
+        else:
+            raise TypeError(repr(type(wrapped)))
+
+    return functools.partial(deprecated, **kwargs)
diff --git a/venv/lib/python3.10/site-packages/deprecated/sphinx.py b/venv/lib/python3.10/site-packages/deprecated/sphinx.py
new file mode 100644
index 0000000..6e11e66
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/deprecated/sphinx.py
@@ -0,0 +1,258 @@
+# coding: utf-8
+"""
+Sphinx directive integration
+============================
+
+We usually need to document the life-cycle of functions and classes:
+when they are created, modified or deprecated.
+
+To do that, `Sphinx <https://www.sphinx-doc.org>`_ has a set
+of Paragraph-level markups:
+
+- ``versionadded``: to document the version of the project which added the described feature to the library,
+- ``versionchanged``: to document changes of a feature,
+- ``deprecated``: to document a deprecated feature.
+
+The purpose of this module is to define decorators which add these Sphinx directives
+to the docstring of your function and classes.
+
+Of course, the ``@deprecated`` decorator will emit a deprecation warning
+when the function/method is called or the class is constructed.
+"""
+import re
+import textwrap
+
+import wrapt
+
+from deprecated.classic import ClassicAdapter
+from deprecated.classic import deprecated as _classic_deprecated
+
+
+class SphinxAdapter(ClassicAdapter):
+    """
+    Sphinx adapter -- *for advanced usage only*
+
+    This adapter overrides the :class:`~deprecated.classic.ClassicAdapter`
+    in order to add the Sphinx directives to the end of the function/class docstring.
+    Such a directive is a Paragraph-level markup.
+
+    - The directive can be one of "versionadded", "versionchanged" or "deprecated".
+    - The version number is added if provided.
+    - The reason message is obviously added in the directive block if not empty.
+    """
+
+    def __init__(
+        self,
+        directive,
+        reason="",
+        version="",
+        action=None,
+        category=DeprecationWarning,
+        line_length=70,
+    ):
+        """
+        Construct a wrapper adapter.
+
+        :type directive: str
+        :param directive:
+            Sphinx directive: can be one of "versionadded", "versionchanged" or "deprecated".
+
+        :type reason: str
+        :param reason:
+            Reason message which documents the deprecation in your library (can be omitted).
+
+        :type version: str
+        :param version:
+            Version of your project which deprecates this feature.
+            If you follow the `Semantic Versioning <https://semver.org/>`_,
+            the version number has the format "MAJOR.MINOR.PATCH".
+
+        :type action: str
+        :param action:
+            A warning filter used to activate or not the deprecation warning.
+            Can be one of "error", "ignore", "always", "default", "module", or "once".
+            If ``None`` or empty, the global filtering mechanism is used.
+            See: `The Warnings Filter`_ in the Python documentation.
+
+        :type category: type
+        :param category:
+            The warning category to use for the deprecation warning.
+            By default, the category class is :class:`~DeprecationWarning`,
+            you can inherit this class to define your own deprecation warning category.
+
+        :type line_length: int
+        :param line_length:
+            Max line length of the directive text. If non-zero, a long text is wrapped in several lines.
+        """
+        if not version:
+            # https://github.com/tantale/deprecated/issues/40
+            raise ValueError("'version' argument is required in Sphinx directives")
+        self.directive = directive
+        self.line_length = line_length
+        super(SphinxAdapter, self).__init__(reason=reason, version=version, action=action, category=category)
+
+    def __call__(self, wrapped):
+        """
+        Add the Sphinx directive to your class or function.
+
+        :param wrapped: Wrapped class or function.
+
+        :return: the decorated class or function.
+        """
+        # -- build the directive division
+        fmt = ".. {directive}:: {version}" if self.version else ".. {directive}::"
+        div_lines = [fmt.format(directive=self.directive, version=self.version)]
+        width = self.line_length - 3 if self.line_length > 3 else 2 ** 16
+        reason = textwrap.dedent(self.reason).strip()
+        for paragraph in reason.splitlines():
+            if paragraph:
+                div_lines.extend(
+                    textwrap.fill(
+                        paragraph,
+                        width=width,
+                        initial_indent="   ",
+                        subsequent_indent="   ",
+                    ).splitlines()
+                )
+            else:
+                div_lines.append("")
+
+        # -- get the docstring, normalize the trailing newlines
+        docstring = textwrap.dedent(wrapped.__doc__ or "")
+        if docstring:
+            # An empty line must separate the original docstring and the directive.
+            docstring = re.sub(r"\n+$", "", docstring, flags=re.DOTALL) + "\n\n"
+        else:
+            # Avoid "Explicit markup ends without a blank line" when the decorated function has no docstring
+            docstring = "\n"
+
+        # -- append the directive division to the docstring
+        docstring += "".join("{}\n".format(line) for line in div_lines)
+
+        wrapped.__doc__ = docstring
+        if self.directive in {"versionadded", "versionchanged"}:
+            return wrapped
+        return super(SphinxAdapter, self).__call__(wrapped)
+
+    def get_deprecated_msg(self, wrapped, instance):
+        """
+        Get the deprecation warning message (without Sphinx cross-referencing syntax) for the user.
+
+        :param wrapped: Wrapped class or function.
+
+        :param instance: The object to which the wrapped function was bound when it was called.
+
+        :return: The warning message.
+
+        .. versionadded:: 1.2.12
+           Strip Sphinx cross-referencing syntax from warning message.
+
+        """
+        msg = super(SphinxAdapter, self).get_deprecated_msg(wrapped, instance)
+        # Strip Sphinx cross reference syntax (like ":function:", ":py:func:" and ":py:meth:")
+        # Possible values are ":role:`foo`", ":domain:role:`foo`"
+        # where ``role`` and ``domain`` should match "[a-zA-Z]+"
+        msg = re.sub(r"(?: : [a-zA-Z]+ )? : [a-zA-Z]+ : (`[^`]*`)", r"\1", msg, flags=re.X)
+        return msg
+
+
+def versionadded(reason="", version="", line_length=70):
+    """
+    This decorator can be used to insert a "versionadded" directive
+    in your function/class docstring in order to document the
+    version of the project which adds this new functionality in your library.
+
+    :param str reason:
+        Reason message which documents the addition in your library (can be omitted).
+
+    :param str version:
+        Version of your project which adds this feature.
+        If you follow the `Semantic Versioning <https://semver.org/>`_,
+        the version number has the format "MAJOR.MINOR.PATCH", and,
+        in the case of a new functionality, the "PATCH" component should be "0".
+
+    :type line_length: int
+    :param line_length:
+        Max line length of the directive text. If non-zero, a long text is wrapped in several lines.
+
+    :return: the decorated function.
+    """
+    adapter = SphinxAdapter(
+        'versionadded',
+        reason=reason,
+        version=version,
+        line_length=line_length,
+    )
+    return adapter
+
+
+def versionchanged(reason="", version="", line_length=70):
+    """
+    This decorator can be used to insert a "versionchanged" directive
+    in your function/class docstring in order to document the
+    version of the project which modifies this functionality in your library.
+
+    :param str reason:
+        Reason message which documents the modification in your library (can be omitted).
+
+    :param str version:
+        Version of your project which modifies this feature.
+        If you follow the `Semantic Versioning <https://semver.org/>`_,
+        the version number has the format "MAJOR.MINOR.PATCH".
+
+    :type line_length: int
+    :param line_length:
+        Max line length of the directive text. If non-zero, a long text is wrapped in several lines.
+
+    :return: the decorated function.
+    """
+    adapter = SphinxAdapter(
+        'versionchanged',
+        reason=reason,
+        version=version,
+        line_length=line_length,
+    )
+    return adapter
+
+
+def deprecated(reason="", version="", line_length=70, **kwargs):
+    """
+    This decorator can be used to insert a "deprecated" directive
+    in your function/class docstring in order to document the
+    version of the project which deprecates this functionality in your library.
+
+    :param str reason:
+        Reason message which documents the deprecation in your library (can be omitted).
+
+    :param str version:
+        Version of your project which deprecates this feature.
+        If you follow the `Semantic Versioning <https://semver.org/>`_,
+        the version number has the format "MAJOR.MINOR.PATCH".
+
+    :type line_length: int
+    :param line_length:
+        Max line length of the directive text. If non-zero, a long text is wrapped in several lines.
+
+    Keyword arguments can be:
+
+    -   "action":
+        A warning filter used to activate or not the deprecation warning.
+        Can be one of "error", "ignore", "always", "default", "module", or "once".
+        If ``None``, empty or missing, the global filtering mechanism is used.
+
+    -   "category":
+        The warning category to use for the deprecation warning.
+        By default, the category class is :class:`~DeprecationWarning`,
+        you can inherit this class to define your own deprecation warning category.
+
+    :return: a decorator used to deprecate a function.
+
+    .. versionchanged:: 1.2.13
+       Change the signature of the decorator to reflect the valid use cases.
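+
+    For example (an illustrative sketch; the function name is a placeholder):
+
+    .. code-block:: python
+
+        from deprecated.sphinx import deprecated
+
+
+        @deprecated(reason="use new_function instead", version="1.2.0")
+        def some_old_function(x, y):
+            return x + y
+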
+ """ + directive = kwargs.pop('directive', 'deprecated') + adapter_cls = kwargs.pop('adapter_cls', SphinxAdapter) + kwargs["reason"] = reason + kwargs["version"] = version + kwargs["line_length"] = line_length + return _classic_deprecated(directive=directive, adapter_cls=adapter_cls, **kwargs) diff --git a/venv/lib/python3.10/site-packages/distutils-precedence.pth b/venv/lib/python3.10/site-packages/distutils-precedence.pth new file mode 100644 index 0000000..6de4198 --- /dev/null +++ b/venv/lib/python3.10/site-packages/distutils-precedence.pth @@ -0,0 +1 @@ +import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/venv/lib/python3.10/site-packages/dns/__init__.py b/venv/lib/python3.10/site-packages/dns/__init__.py new file mode 100644 index 0000000..0473ca1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/__init__.py @@ -0,0 +1,66 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
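+
+# A minimal usage sketch (illustrative only; requires network access):
+#
+#     import dns.resolver
+#     answer = dns.resolver.resolve('example.com', 'A')
+#     addresses = [rr.address for rr in answer]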
+ +"""dnspython DNS toolkit""" + +__all__ = [ + 'asyncbackend', + 'asyncquery', + 'asyncresolver', + 'dnssec', + 'e164', + 'edns', + 'entropy', + 'exception', + 'flags', + 'immutable', + 'inet', + 'ipv4', + 'ipv6', + 'message', + 'name', + 'namedict', + 'node', + 'opcode', + 'query', + 'rcode', + 'rdata', + 'rdataclass', + 'rdataset', + 'rdatatype', + 'renderer', + 'resolver', + 'reversename', + 'rrset', + 'serial', + 'set', + 'tokenizer', + 'transaction', + 'tsig', + 'tsigkeyring', + 'ttl', + 'rdtypes', + 'update', + 'version', + 'versioned', + 'wire', + 'xfr', + 'zone', + 'zonefile', +] + +from dns.version import version as __version__ # noqa diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..e5eed47 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/_asyncbackend.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/_asyncbackend.cpython-310.pyc new file mode 100644 index 0000000..2f57fad Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/_asyncbackend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/_asyncio_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/_asyncio_backend.cpython-310.pyc new file mode 100644 index 0000000..9d3c772 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/_asyncio_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/_curio_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/_curio_backend.cpython-310.pyc new file mode 100644 index 0000000..ef0b7e0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/_curio_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/_immutable_attr.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/_immutable_attr.cpython-310.pyc new file mode 100644 index 0000000..c8927f4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/_immutable_attr.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/_immutable_ctx.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/_immutable_ctx.cpython-310.pyc new file mode 100644 index 0000000..eb1d614 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/_immutable_ctx.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/_trio_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/_trio_backend.cpython-310.pyc new file mode 100644 index 0000000..9f3aef5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/_trio_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/asyncbackend.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/asyncbackend.cpython-310.pyc new file mode 100644 index 0000000..94c1530 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/asyncbackend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/asyncquery.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/asyncquery.cpython-310.pyc new file mode 100644 index 0000000..39922a1 
Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/asyncquery.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/asyncresolver.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/asyncresolver.cpython-310.pyc new file mode 100644 index 0000000..753ab57 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/asyncresolver.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/dnssec.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/dnssec.cpython-310.pyc new file mode 100644 index 0000000..b0ea19c Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/dnssec.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/e164.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/e164.cpython-310.pyc new file mode 100644 index 0000000..4f9b662 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/e164.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/edns.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/edns.cpython-310.pyc new file mode 100644 index 0000000..1306933 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/edns.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/entropy.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/entropy.cpython-310.pyc new file mode 100644 index 0000000..80fd619 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/entropy.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/enum.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/enum.cpython-310.pyc new file mode 100644 index 0000000..08ee379 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/enum.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/exception.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/exception.cpython-310.pyc new file mode 100644 index 0000000..94ae995 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/exception.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/flags.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/flags.cpython-310.pyc new file mode 100644 index 0000000..9fa52eb Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/flags.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/grange.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/grange.cpython-310.pyc new file mode 100644 index 0000000..8394bec Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/grange.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/immutable.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/immutable.cpython-310.pyc new file mode 100644 index 0000000..ecd1a38 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/immutable.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/inet.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/inet.cpython-310.pyc new file mode 100644 index 0000000..484d1d0 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/dns/__pycache__/inet.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/ipv4.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/ipv4.cpython-310.pyc new file mode 100644 index 0000000..1a6096d Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/ipv4.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/ipv6.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/ipv6.cpython-310.pyc new file mode 100644 index 0000000..7d51a10 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/ipv6.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/message.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/message.cpython-310.pyc new file mode 100644 index 0000000..d6081f2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/message.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/name.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/name.cpython-310.pyc new file mode 100644 index 0000000..eaf6da8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/name.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/namedict.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/namedict.cpython-310.pyc new file mode 100644 index 0000000..107b5bc Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/namedict.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/node.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/node.cpython-310.pyc new file mode 100644 index 0000000..35e5ccc Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/node.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/opcode.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/opcode.cpython-310.pyc new file mode 100644 index 0000000..51f54bc Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/opcode.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/query.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/query.cpython-310.pyc new file mode 100644 index 0000000..33df030 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/query.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/rcode.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/rcode.cpython-310.pyc new file mode 100644 index 0000000..794c655 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/rcode.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/rdata.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/rdata.cpython-310.pyc new file mode 100644 index 0000000..e645ea3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/rdata.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/rdataclass.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/rdataclass.cpython-310.pyc new file mode 100644 index 0000000..5440591 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/rdataclass.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/dns/__pycache__/rdataset.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/rdataset.cpython-310.pyc new file mode 100644 index 0000000..1a7fc84 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/rdataset.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/rdatatype.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/rdatatype.cpython-310.pyc new file mode 100644 index 0000000..a52a306 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/rdatatype.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/renderer.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/renderer.cpython-310.pyc new file mode 100644 index 0000000..b2d3ef4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/renderer.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/resolver.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/resolver.cpython-310.pyc new file mode 100644 index 0000000..ef1f366 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/resolver.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/reversename.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/reversename.cpython-310.pyc new file mode 100644 index 0000000..136b473 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/reversename.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/rrset.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/rrset.cpython-310.pyc new file mode 100644 index 0000000..521774a Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/rrset.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/serial.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/serial.cpython-310.pyc new file mode 100644 index 0000000..fa52764 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/serial.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/set.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/set.cpython-310.pyc new file mode 100644 index 0000000..f79cd5d Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/set.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/tokenizer.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/tokenizer.cpython-310.pyc new file mode 100644 index 0000000..462c862 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/tokenizer.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/transaction.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/transaction.cpython-310.pyc new file mode 100644 index 0000000..3352d6f Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/transaction.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/tsig.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/tsig.cpython-310.pyc new file mode 100644 index 0000000..0856048 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/tsig.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/dns/__pycache__/tsigkeyring.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/tsigkeyring.cpython-310.pyc new file mode 100644 index 0000000..2179a7a Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/tsigkeyring.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/ttl.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/ttl.cpython-310.pyc new file mode 100644 index 0000000..ae043c7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/ttl.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/update.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/update.cpython-310.pyc new file mode 100644 index 0000000..b4a56f6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/update.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/version.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/version.cpython-310.pyc new file mode 100644 index 0000000..e06266c Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/versioned.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/versioned.cpython-310.pyc new file mode 100644 index 0000000..04ecfdd Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/versioned.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/win32util.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/win32util.cpython-310.pyc new file mode 100644 index 0000000..7a30e5c Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/win32util.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/wire.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/wire.cpython-310.pyc new file mode 100644 index 0000000..9deb549 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/wire.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/xfr.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/xfr.cpython-310.pyc new file mode 100644 index 0000000..6701942 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/xfr.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/zone.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/zone.cpython-310.pyc new file mode 100644 index 0000000..b2adbd6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/zone.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/__pycache__/zonefile.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/__pycache__/zonefile.cpython-310.pyc new file mode 100644 index 0000000..5e18053 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/__pycache__/zonefile.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/_asyncbackend.py b/venv/lib/python3.10/site-packages/dns/_asyncbackend.py new file mode 100644 index 0000000..1f3a828 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/_asyncbackend.py @@ -0,0 +1,69 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# This is a nullcontext for both sync and async. 
3.7 has a nullcontext, +# but it is only for sync use. + +class NullContext: + def __init__(self, enter_result=None): + self.enter_result = enter_result + + def __enter__(self): + return self.enter_result + + def __exit__(self, exc_type, exc_value, traceback): + pass + + async def __aenter__(self): + return self.enter_result + + async def __aexit__(self, exc_type, exc_value, traceback): + pass + + +# These are declared here so backends can import them without creating +# circular dependencies with dns.asyncbackend. + +class Socket: # pragma: no cover + async def close(self): + pass + + async def getpeername(self): + raise NotImplementedError + + async def getsockname(self): + raise NotImplementedError + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.close() + + +class DatagramSocket(Socket): # pragma: no cover + async def sendto(self, what, destination, timeout): + raise NotImplementedError + + async def recvfrom(self, size, timeout): + raise NotImplementedError + + +class StreamSocket(Socket): # pragma: no cover + async def sendall(self, what, timeout): + raise NotImplementedError + + async def recv(self, size, timeout): + raise NotImplementedError + + +class Backend: # pragma: no cover + def name(self): + return 'unknown' + + async def make_socket(self, af, socktype, proto=0, + source=None, destination=None, timeout=None, + ssl_context=None, server_hostname=None): + raise NotImplementedError + + def datagram_connection_required(self): + return False diff --git a/venv/lib/python3.10/site-packages/dns/_asyncio_backend.py b/venv/lib/python3.10/site-packages/dns/_asyncio_backend.py new file mode 100644 index 0000000..d737d13 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/_asyncio_backend.py @@ -0,0 +1,149 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""asyncio library query support""" + +import socket +import asyncio +import sys + +import dns._asyncbackend +import dns.exception + + +_is_win32 = sys.platform == 'win32' + +def _get_running_loop(): + try: + return asyncio.get_running_loop() + except AttributeError: # pragma: no cover + return asyncio.get_event_loop() + + +class _DatagramProtocol: + def __init__(self): + self.transport = None + self.recvfrom = None + + def connection_made(self, transport): + self.transport = transport + + def datagram_received(self, data, addr): + if self.recvfrom and not self.recvfrom.done(): + self.recvfrom.set_result((data, addr)) + self.recvfrom = None + + def error_received(self, exc): # pragma: no cover + if self.recvfrom and not self.recvfrom.done(): + self.recvfrom.set_exception(exc) + + def connection_lost(self, exc): + if self.recvfrom and not self.recvfrom.done(): + self.recvfrom.set_exception(exc) + + def close(self): + self.transport.close() + + +async def _maybe_wait_for(awaitable, timeout): + if timeout: + try: + return await asyncio.wait_for(awaitable, timeout) + except asyncio.TimeoutError: + raise dns.exception.Timeout(timeout=timeout) + else: + return await awaitable + + +class DatagramSocket(dns._asyncbackend.DatagramSocket): + def __init__(self, family, transport, protocol): + self.family = family + self.transport = transport + self.protocol = protocol + + async def sendto(self, what, destination, timeout): # pragma: no cover + # no timeout for asyncio sendto + self.transport.sendto(what, destination) + + async def recvfrom(self, size, timeout): + # ignore size as there's no way I know to tell protocol about it + done = 
_get_running_loop().create_future() + assert self.protocol.recvfrom is None + self.protocol.recvfrom = done + await _maybe_wait_for(done, timeout) + return done.result() + + async def close(self): + self.protocol.close() + + async def getpeername(self): + return self.transport.get_extra_info('peername') + + async def getsockname(self): + return self.transport.get_extra_info('sockname') + + +class StreamSocket(dns._asyncbackend.StreamSocket): + def __init__(self, af, reader, writer): + self.family = af + self.reader = reader + self.writer = writer + + async def sendall(self, what, timeout): + self.writer.write(what) + return await _maybe_wait_for(self.writer.drain(), timeout) + + async def recv(self, size, timeout): + return await _maybe_wait_for(self.reader.read(size), + timeout) + + async def close(self): + self.writer.close() + try: + await self.writer.wait_closed() + except AttributeError: # pragma: no cover + pass + + async def getpeername(self): + return self.writer.get_extra_info('peername') + + async def getsockname(self): + return self.writer.get_extra_info('sockname') + + +class Backend(dns._asyncbackend.Backend): + def name(self): + return 'asyncio' + + async def make_socket(self, af, socktype, proto=0, + source=None, destination=None, timeout=None, + ssl_context=None, server_hostname=None): + if destination is None and socktype == socket.SOCK_DGRAM and \ + _is_win32: + raise NotImplementedError('destinationless datagram sockets ' + 'are not supported by asyncio ' + 'on Windows') + loop = _get_running_loop() + if socktype == socket.SOCK_DGRAM: + transport, protocol = await loop.create_datagram_endpoint( + _DatagramProtocol, source, family=af, + proto=proto, remote_addr=destination) + return DatagramSocket(af, transport, protocol) + elif socktype == socket.SOCK_STREAM: + (r, w) = await _maybe_wait_for( + asyncio.open_connection(destination[0], + destination[1], + ssl=ssl_context, + family=af, + proto=proto, + local_addr=source, + server_hostname=server_hostname), + timeout) + return StreamSocket(af, r, w) + raise NotImplementedError('unsupported socket ' + + f'type {socktype}') # pragma: no cover + + async def sleep(self, interval): + await asyncio.sleep(interval) + + def datagram_connection_required(self): + return _is_win32 diff --git a/venv/lib/python3.10/site-packages/dns/_curio_backend.py b/venv/lib/python3.10/site-packages/dns/_curio_backend.py new file mode 100644 index 0000000..6fa7b3a --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/_curio_backend.py @@ -0,0 +1,108 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""curio async I/O library query support""" + +import socket +import curio +import curio.socket # type: ignore + +import dns._asyncbackend +import dns.exception +import dns.inet + + +def _maybe_timeout(timeout): + if timeout: + return curio.ignore_after(timeout) + else: + return dns._asyncbackend.NullContext() + + +# for brevity +_lltuple = dns.inet.low_level_address_tuple + +# pylint: disable=redefined-outer-name + + +class DatagramSocket(dns._asyncbackend.DatagramSocket): + def __init__(self, socket): + self.socket = socket + self.family = socket.family + + async def sendto(self, what, destination, timeout): + async with _maybe_timeout(timeout): + return await self.socket.sendto(what, destination) + raise dns.exception.Timeout(timeout=timeout) # pragma: no cover + + async def recvfrom(self, size, timeout): + async with _maybe_timeout(timeout): + return await self.socket.recvfrom(size) + raise 
dns.exception.Timeout(timeout=timeout) + + async def close(self): + await self.socket.close() + + async def getpeername(self): + return self.socket.getpeername() + + async def getsockname(self): + return self.socket.getsockname() + + +class StreamSocket(dns._asyncbackend.StreamSocket): + def __init__(self, socket): + self.socket = socket + self.family = socket.family + + async def sendall(self, what, timeout): + async with _maybe_timeout(timeout): + return await self.socket.sendall(what) + raise dns.exception.Timeout(timeout=timeout) + + async def recv(self, size, timeout): + async with _maybe_timeout(timeout): + return await self.socket.recv(size) + raise dns.exception.Timeout(timeout=timeout) + + async def close(self): + await self.socket.close() + + async def getpeername(self): + return self.socket.getpeername() + + async def getsockname(self): + return self.socket.getsockname() + + +class Backend(dns._asyncbackend.Backend): + def name(self): + return 'curio' + + async def make_socket(self, af, socktype, proto=0, + source=None, destination=None, timeout=None, + ssl_context=None, server_hostname=None): + if socktype == socket.SOCK_DGRAM: + s = curio.socket.socket(af, socktype, proto) + try: + if source: + s.bind(_lltuple(source, af)) + except Exception: # pragma: no cover + await s.close() + raise + return DatagramSocket(s) + elif socktype == socket.SOCK_STREAM: + if source: + source_addr = _lltuple(source, af) + else: + source_addr = None + async with _maybe_timeout(timeout): + s = await curio.open_connection(destination[0], destination[1], + ssl=ssl_context, + source_addr=source_addr, + server_hostname=server_hostname) + return StreamSocket(s) + raise NotImplementedError('unsupported socket ' + + f'type {socktype}') # pragma: no cover + + async def sleep(self, interval): + await curio.sleep(interval) diff --git a/venv/lib/python3.10/site-packages/dns/_immutable_attr.py b/venv/lib/python3.10/site-packages/dns/_immutable_attr.py new file mode 100644 index 0000000..4d89be9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/_immutable_attr.py @@ -0,0 +1,84 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# This implementation of the immutable decorator is for python 3.6, +# which doesn't have Context Variables. This implementation is somewhat +# costly for classes with slots, as it adds a __dict__ to them. + + +import inspect + + +class _Immutable: + """Immutable mixin class""" + + # Note we MUST NOT have __slots__ as that causes + # + # TypeError: multiple bases have instance lay-out conflict + # + # when we get mixed in with another class with slots. When we + # get mixed into something with slots, it effectively adds __dict__ to + # the slots of the other class, which allows attribute setting to work, + # albeit at the cost of the dictionary. + + def __setattr__(self, name, value): + if not hasattr(self, '_immutable_init') or \ + self._immutable_init is not self: + raise TypeError("object doesn't support attribute assignment") + else: + super().__setattr__(name, value) + + def __delattr__(self, name): + if not hasattr(self, '_immutable_init') or \ + self._immutable_init is not self: + raise TypeError("object doesn't support attribute assignment") + else: + super().__delattr__(name) + + +def _immutable_init(f): + def nf(*args, **kwargs): + try: + # Are we already initializing an immutable class? + previous = args[0]._immutable_init + except AttributeError: + # We are the first! 
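+            # The sentinel set below is the instance itself: the
+            # _Immutable.__setattr__ guard only permits assignment while
+            # "_immutable_init is self", so nested super().__init__()
+            # calls still work, and the object freezes again once the
+            # outermost __init__ deletes the attribute.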
+ previous = None + object.__setattr__(args[0], '_immutable_init', args[0]) + try: + # call the actual __init__ + f(*args, **kwargs) + finally: + if not previous: + # If we started the initialization, establish immutability + # by removing the attribute that allows mutation + object.__delattr__(args[0], '_immutable_init') + nf.__signature__ = inspect.signature(f) + return nf + + +def immutable(cls): + if _Immutable in cls.__mro__: + # Some ancestor already has the mixin, so just make sure we keep + # following the __init__ protocol. + cls.__init__ = _immutable_init(cls.__init__) + if hasattr(cls, '__setstate__'): + cls.__setstate__ = _immutable_init(cls.__setstate__) + ncls = cls + else: + # Mixin the Immutable class and follow the __init__ protocol. + class ncls(_Immutable, cls): + + @_immutable_init + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + if hasattr(cls, '__setstate__'): + @_immutable_init + def __setstate__(self, *args, **kwargs): + super().__setstate__(*args, **kwargs) + + # make ncls have the same name and module as cls + ncls.__name__ = cls.__name__ + ncls.__qualname__ = cls.__qualname__ + ncls.__module__ = cls.__module__ + return ncls diff --git a/venv/lib/python3.10/site-packages/dns/_immutable_ctx.py b/venv/lib/python3.10/site-packages/dns/_immutable_ctx.py new file mode 100644 index 0000000..ececdbe --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/_immutable_ctx.py @@ -0,0 +1,75 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# This implementation of the immutable decorator requires python >= +# 3.7, and is significantly more storage efficient when making classes +# with slots immutable. It's also faster. + +import contextvars +import inspect + + +_in__init__ = contextvars.ContextVar('_immutable_in__init__', default=False) + + +class _Immutable: + """Immutable mixin class""" + + # We set slots to the empty list to say "we don't have any attributes". + # We do this so that if we're mixed in with a class with __slots__, we + # don't cause a __dict__ to be added which would waste space. + + __slots__ = () + + def __setattr__(self, name, value): + if _in__init__.get() is not self: + raise TypeError("object doesn't support attribute assignment") + else: + super().__setattr__(name, value) + + def __delattr__(self, name): + if _in__init__.get() is not self: + raise TypeError("object doesn't support attribute assignment") + else: + super().__delattr__(name) + + +def _immutable_init(f): + def nf(*args, **kwargs): + previous = _in__init__.set(args[0]) + try: + # call the actual __init__ + f(*args, **kwargs) + finally: + _in__init__.reset(previous) + nf.__signature__ = inspect.signature(f) + return nf + + +def immutable(cls): + if _Immutable in cls.__mro__: + # Some ancestor already has the mixin, so just make sure we keep + # following the __init__ protocol. + cls.__init__ = _immutable_init(cls.__init__) + if hasattr(cls, '__setstate__'): + cls.__setstate__ = _immutable_init(cls.__setstate__) + ncls = cls + else: + # Mixin the Immutable class and follow the __init__ protocol. + class ncls(_Immutable, cls): + # We have to do the __slots__ declaration here too! 
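+        # Keeping __slots__ empty here prevents this generated subclass
+        # from adding a per-instance __dict__, which would defeat the
+        # memory savings of the slotted base classes.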
+ __slots__ = () + + @_immutable_init + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + if hasattr(cls, '__setstate__'): + @_immutable_init + def __setstate__(self, *args, **kwargs): + super().__setstate__(*args, **kwargs) + + # make ncls have the same name and module as cls + ncls.__name__ = cls.__name__ + ncls.__qualname__ = cls.__qualname__ + ncls.__module__ = cls.__module__ + return ncls diff --git a/venv/lib/python3.10/site-packages/dns/_trio_backend.py b/venv/lib/python3.10/site-packages/dns/_trio_backend.py new file mode 100644 index 0000000..a00d4a4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/_trio_backend.py @@ -0,0 +1,121 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""trio async I/O library query support""" + +import socket +import trio +import trio.socket # type: ignore + +import dns._asyncbackend +import dns.exception +import dns.inet + + +def _maybe_timeout(timeout): + if timeout: + return trio.move_on_after(timeout) + else: + return dns._asyncbackend.NullContext() + + +# for brevity +_lltuple = dns.inet.low_level_address_tuple + +# pylint: disable=redefined-outer-name + + +class DatagramSocket(dns._asyncbackend.DatagramSocket): + def __init__(self, socket): + self.socket = socket + self.family = socket.family + + async def sendto(self, what, destination, timeout): + with _maybe_timeout(timeout): + return await self.socket.sendto(what, destination) + raise dns.exception.Timeout(timeout=timeout) # pragma: no cover + + async def recvfrom(self, size, timeout): + with _maybe_timeout(timeout): + return await self.socket.recvfrom(size) + raise dns.exception.Timeout(timeout=timeout) + + async def close(self): + self.socket.close() + + async def getpeername(self): + return self.socket.getpeername() + + async def getsockname(self): + return self.socket.getsockname() + + +class StreamSocket(dns._asyncbackend.StreamSocket): + def __init__(self, family, stream, tls=False): + self.family = family + self.stream = stream + self.tls = tls + + async def sendall(self, what, timeout): + with _maybe_timeout(timeout): + return await self.stream.send_all(what) + raise dns.exception.Timeout(timeout=timeout) + + async def recv(self, size, timeout): + with _maybe_timeout(timeout): + return await self.stream.receive_some(size) + raise dns.exception.Timeout(timeout=timeout) + + async def close(self): + await self.stream.aclose() + + async def getpeername(self): + if self.tls: + return self.stream.transport_stream.socket.getpeername() + else: + return self.stream.socket.getpeername() + + async def getsockname(self): + if self.tls: + return self.stream.transport_stream.socket.getsockname() + else: + return self.stream.socket.getsockname() + + +class Backend(dns._asyncbackend.Backend): + def name(self): + return 'trio' + + async def make_socket(self, af, socktype, proto=0, source=None, + destination=None, timeout=None, + ssl_context=None, server_hostname=None): + s = trio.socket.socket(af, socktype, proto) + stream = None + try: + if source: + await s.bind(_lltuple(source, af)) + if socktype == socket.SOCK_STREAM: + with _maybe_timeout(timeout): + await s.connect(_lltuple(destination, af)) + except Exception: # pragma: no cover + s.close() + raise + if socktype == socket.SOCK_DGRAM: + return DatagramSocket(s) + elif socktype == socket.SOCK_STREAM: + stream = trio.SocketStream(s) + s = None + tls = False + if ssl_context: + tls = True + try: + stream = trio.SSLStream(stream, ssl_context, + server_hostname=server_hostname) + 
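+                    # trio.SSLStream wraps lazily; the TLS handshake runs
+                    # automatically on the first send or receive.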
except Exception:  # pragma: no cover
+                    await stream.aclose()
+                    raise
+            return StreamSocket(af, stream, tls)
+        raise NotImplementedError('unsupported socket ' +
+                                  f'type {socktype}')  # pragma: no cover
+
+    async def sleep(self, interval):
+        await trio.sleep(interval)
diff --git a/venv/lib/python3.10/site-packages/dns/asyncbackend.py b/venv/lib/python3.10/site-packages/dns/asyncbackend.py
new file mode 100644
index 0000000..089d3d3
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dns/asyncbackend.py
@@ -0,0 +1,101 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+import dns.exception
+
+# pylint: disable=unused-import
+
+from dns._asyncbackend import Socket, DatagramSocket, \
+    StreamSocket, Backend    # noqa:
+
+# pylint: enable=unused-import
+
+_default_backend = None
+
+_backends = {}
+
+# Allow sniffio import to be disabled for testing purposes
+_no_sniffio = False
+
+class AsyncLibraryNotFoundError(dns.exception.DNSException):
+    pass
+
+
+def get_backend(name):
+    """Get the specified asynchronous backend.
+
+    *name*, a ``str``, the name of the backend.  Currently the "trio",
+    "curio", and "asyncio" backends are available.
+
+    Raises NotImplementedError if an unknown backend name is specified.
+    """
+    # pylint: disable=import-outside-toplevel,redefined-outer-name
+    backend = _backends.get(name)
+    if backend:
+        return backend
+    if name == 'trio':
+        import dns._trio_backend
+        backend = dns._trio_backend.Backend()
+    elif name == 'curio':
+        import dns._curio_backend
+        backend = dns._curio_backend.Backend()
+    elif name == 'asyncio':
+        import dns._asyncio_backend
+        backend = dns._asyncio_backend.Backend()
+    else:
+        raise NotImplementedError(f'unimplemented async backend {name}')
+    _backends[name] = backend
+    return backend
+
+
+def sniff():
+    """Attempt to determine the in-use asynchronous I/O library by using
+    the ``sniffio`` module if it is available.
+
+    Returns the name of the library, or raises AsyncLibraryNotFoundError
+    if the library cannot be determined.
+    """
+    # pylint: disable=import-outside-toplevel
+    try:
+        if _no_sniffio:
+            raise ImportError
+        import sniffio
+        try:
+            return sniffio.current_async_library()
+        except sniffio.AsyncLibraryNotFoundError:
+            raise AsyncLibraryNotFoundError('sniffio cannot determine ' +
+                                            'async library')
+    except ImportError:
+        import asyncio
+        try:
+            asyncio.get_running_loop()
+            return 'asyncio'
+        except RuntimeError:
+            raise AsyncLibraryNotFoundError('no async library detected')
+        except AttributeError:  # pragma: no cover
+            # we have to check current_task on 3.6
+            if not asyncio.Task.current_task():
+                raise AsyncLibraryNotFoundError('no async library detected')
+            return 'asyncio'
+
+
+def get_default_backend():
+    """Get the default backend, initializing it if necessary.
+    """
+    if _default_backend:
+        return _default_backend
+
+    return set_default_backend(sniff())
+
+
+def set_default_backend(name):
+    """Set the default backend.
+
+    It's not normally necessary to call this method, as
+    ``get_default_backend()`` will initialize the backend
+    appropriately in many cases.  If ``sniffio`` is not installed, or
+    in testing situations, this function allows the backend to be set
+    explicitly.
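+
+    A minimal sketch of explicit selection (assuming the trio backend's
+    dependencies are installed)::
+
+        import dns.asyncbackend
+
+        dns.asyncbackend.set_default_backend('trio')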
+ """ + global _default_backend + _default_backend = get_backend(name) + return _default_backend diff --git a/venv/lib/python3.10/site-packages/dns/asyncbackend.pyi b/venv/lib/python3.10/site-packages/dns/asyncbackend.pyi new file mode 100644 index 0000000..1ec9d32 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/asyncbackend.pyi @@ -0,0 +1,13 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +class Backend: + ... + +def get_backend(name: str) -> Backend: + ... +def sniff() -> str: + ... +def get_default_backend() -> Backend: + ... +def set_default_backend(name: str) -> Backend: + ... diff --git a/venv/lib/python3.10/site-packages/dns/asyncquery.py b/venv/lib/python3.10/site-packages/dns/asyncquery.py new file mode 100644 index 0000000..4ec97fb --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/asyncquery.py @@ -0,0 +1,523 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Talk to a DNS server.""" + +import base64 +import socket +import struct +import time + +import dns.asyncbackend +import dns.exception +import dns.inet +import dns.name +import dns.message +import dns.rcode +import dns.rdataclass +import dns.rdatatype + +from dns.query import _compute_times, _matches_destination, BadResponse, ssl, \ + UDPMode, _have_httpx, _have_http2, NoDOH + +if _have_httpx: + import httpx + +# for brevity +_lltuple = dns.inet.low_level_address_tuple + + +def _source_tuple(af, address, port): + # Make a high level source tuple, or return None if address and port + # are both None + if address or port: + if address is None: + if af == socket.AF_INET: + address = '0.0.0.0' + elif af == socket.AF_INET6: + address = '::' + else: + raise NotImplementedError(f'unknown address family {af}') + return (address, port) + else: + return None + + +def _timeout(expiration, now=None): + if expiration: + if not now: + now = time.time() + return max(expiration - now, 0) + else: + return None + + +async def send_udp(sock, what, destination, expiration=None): + """Send a DNS message to the specified UDP socket. + + *sock*, a ``dns.asyncbackend.DatagramSocket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where to send the query. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. 
+ """ + + if isinstance(what, dns.message.Message): + what = what.to_wire() + sent_time = time.time() + n = await sock.sendto(what, destination, _timeout(expiration, sent_time)) + return (n, sent_time) + + +async def receive_udp(sock, destination=None, expiration=None, + ignore_unexpected=False, one_rr_per_rrset=False, + keyring=None, request_mac=b'', ignore_trailing=False, + raise_on_truncation=False): + """Read a DNS message from a UDP socket. + + *sock*, a ``dns.asyncbackend.DatagramSocket``. + + See :py:func:`dns.query.receive_udp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + + wire = b'' + while 1: + (wire, from_address) = await sock.recvfrom(65535, _timeout(expiration)) + if _matches_destination(sock.family, from_address, destination, + ignore_unexpected): + break + received_time = time.time() + r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + raise_on_truncation=raise_on_truncation) + return (r, received_time, from_address) + +async def udp(q, where, timeout=None, port=53, source=None, source_port=0, + ignore_unexpected=False, one_rr_per_rrset=False, + ignore_trailing=False, raise_on_truncation=False, sock=None, + backend=None): + """Return the response obtained after sending a query via UDP. + + *sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, + the socket to use for the query. If ``None``, the default, a + socket is created. Note that if a socket is provided, the + *source*, *source_port*, and *backend* are ignored. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.udp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + s = None + # After 3.6 is no longer supported, this can use an AsyncExitStack. + try: + af = dns.inet.af_for_address(where) + destination = _lltuple((where, port), af) + if sock: + s = sock + else: + if not backend: + backend = dns.asyncbackend.get_default_backend() + stuple = _source_tuple(af, source, source_port) + if backend.datagram_connection_required(): + dtuple = (where, port) + else: + dtuple = None + s = await backend.make_socket(af, socket.SOCK_DGRAM, 0, stuple, + dtuple) + await send_udp(s, wire, destination, expiration) + (r, received_time, _) = await receive_udp(s, destination, expiration, + ignore_unexpected, + one_rr_per_rrset, + q.keyring, q.mac, + ignore_trailing, + raise_on_truncation) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + finally: + if not sock and s: + await s.close() + +async def udp_with_fallback(q, where, timeout=None, port=53, source=None, + source_port=0, ignore_unexpected=False, + one_rr_per_rrset=False, ignore_trailing=False, + udp_sock=None, tcp_sock=None, backend=None): + """Return the response to the query, trying UDP first and falling back + to TCP if UDP results in a truncated response. + + *udp_sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, + the socket to use for the UDP query. If ``None``, the default, a + socket is created. Note that if a socket is provided the *source*, + *source_port*, and *backend* are ignored for the UDP query. + + *tcp_sock*, a ``dns.asyncbackend.StreamSocket``, or ``None``, the + socket to use for the TCP query. 
If ``None``, the default, a
+    socket is created.  Note that if a socket is provided *where*,
+    *source*, *source_port*, and *backend* are ignored for the TCP query.
+
+    *backend*, a ``dns.asyncbackend.Backend``, or ``None``.  If ``None``,
+    the default, then dnspython will use the default backend.
+
+    See :py:func:`dns.query.udp_with_fallback()` for the documentation
+    of the other parameters, exceptions, and return type of this
+    method.
+    """
+    try:
+        response = await udp(q, where, timeout, port, source, source_port,
+                             ignore_unexpected, one_rr_per_rrset,
+                             ignore_trailing, True, udp_sock, backend)
+        return (response, False)
+    except dns.message.Truncated:
+        response = await tcp(q, where, timeout, port, source, source_port,
+                             one_rr_per_rrset, ignore_trailing, tcp_sock,
+                             backend)
+        return (response, True)
+
+
+async def send_tcp(sock, what, expiration=None):
+    """Send a DNS message to the specified TCP socket.
+
+    *sock*, a ``dns.asyncbackend.StreamSocket``.
+
+    See :py:func:`dns.query.send_tcp()` for the documentation of the other
+    parameters, exceptions, and return type of this method.
+    """
+
+    if isinstance(what, dns.message.Message):
+        what = what.to_wire()
+    l = len(what)
+    # copying the wire into tcpmsg is inefficient, but lets us
+    # avoid writev() or doing a short write that would get pushed
+    # onto the net
+    tcpmsg = struct.pack("!H", l) + what
+    sent_time = time.time()
+    await sock.sendall(tcpmsg, _timeout(expiration, sent_time))
+    return (len(tcpmsg), sent_time)
+
+
+async def _read_exactly(sock, count, expiration):
+    """Read the specified number of bytes from stream.  Keep trying until we
+    either get the desired amount, or we hit EOF.
+    """
+    s = b''
+    while count > 0:
+        n = await sock.recv(count, _timeout(expiration))
+        if n == b'':
+            raise EOFError
+        count = count - len(n)
+        s = s + n
+    return s
+
+
+async def receive_tcp(sock, expiration=None, one_rr_per_rrset=False,
+                      keyring=None, request_mac=b'', ignore_trailing=False):
+    """Read a DNS message from a TCP socket.
+
+    *sock*, a ``dns.asyncbackend.StreamSocket``.
+
+    See :py:func:`dns.query.receive_tcp()` for the documentation of the other
+    parameters, exceptions, and return type of this method.
+    """
+
+    ldata = await _read_exactly(sock, 2, expiration)
+    (l,) = struct.unpack("!H", ldata)
+    wire = await _read_exactly(sock, l, expiration)
+    received_time = time.time()
+    r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac,
+                              one_rr_per_rrset=one_rr_per_rrset,
+                              ignore_trailing=ignore_trailing)
+    return (r, received_time)
+
+
+async def tcp(q, where, timeout=None, port=53, source=None, source_port=0,
+              one_rr_per_rrset=False, ignore_trailing=False, sock=None,
+              backend=None):
+    """Return the response obtained after sending a query via TCP.
+
+    *sock*, a ``dns.asyncbackend.StreamSocket``, or ``None``, the
+    socket to use for the query.  If ``None``, the default, a socket
+    is created.  Note that if a socket is provided
+    *where*, *port*, *source*, *source_port*, and *backend* are ignored.
+
+    *backend*, a ``dns.asyncbackend.Backend``, or ``None``.  If ``None``,
+    the default, then dnspython will use the default backend.
+
+    See :py:func:`dns.query.tcp()` for the documentation of the other
+    parameters, exceptions, and return type of this method.
+    """
+
+    wire = q.to_wire()
+    (begin_time, expiration) = _compute_times(timeout)
+    s = None
+    # After 3.6 is no longer supported, this can use an AsyncExitStack.
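+    # We only close 's' in the finally clause below when we created it
+    # here; a caller-provided socket is left open for its owner.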
+ try: + if sock: + # Verify that the socket is connected, as if it's not connected, + # it's not writable, and the polling in send_tcp() will time out or + # hang forever. + await sock.getpeername() + s = sock + else: + # These are simple (address, port) pairs, not + # family-dependent tuples you pass to lowlevel socket + # code. + af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + if not backend: + backend = dns.asyncbackend.get_default_backend() + s = await backend.make_socket(af, socket.SOCK_STREAM, 0, stuple, + dtuple, timeout) + await send_tcp(s, wire, expiration) + (r, received_time) = await receive_tcp(s, expiration, one_rr_per_rrset, + q.keyring, q.mac, + ignore_trailing) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + finally: + if not sock and s: + await s.close() + +async def tls(q, where, timeout=None, port=853, source=None, source_port=0, + one_rr_per_rrset=False, ignore_trailing=False, sock=None, + backend=None, ssl_context=None, server_hostname=None): + """Return the response obtained after sending a query via TLS. + + *sock*, an ``asyncbackend.StreamSocket``, or ``None``, the socket + to use for the query. If ``None``, the default, a socket is + created. Note that if a socket is provided, it must be a + connected SSL stream socket, and *where*, *port*, + *source*, *source_port*, *backend*, *ssl_context*, and *server_hostname* + are ignored. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.tls()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + # After 3.6 is no longer supported, this can use an AsyncExitStack. + (begin_time, expiration) = _compute_times(timeout) + if not sock: + if ssl_context is None: + ssl_context = ssl.create_default_context() + if server_hostname is None: + ssl_context.check_hostname = False + else: + ssl_context = None + server_hostname = None + af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + if not backend: + backend = dns.asyncbackend.get_default_backend() + s = await backend.make_socket(af, socket.SOCK_STREAM, 0, stuple, + dtuple, timeout, ssl_context, + server_hostname) + else: + s = sock + try: + timeout = _timeout(expiration) + response = await tcp(q, where, timeout, port, source, source_port, + one_rr_per_rrset, ignore_trailing, s, backend) + end_time = time.time() + response.time = end_time - begin_time + return response + finally: + if not sock and s: + await s.close() + +async def https(q, where, timeout=None, port=443, source=None, source_port=0, + one_rr_per_rrset=False, ignore_trailing=False, client=None, + path='/dns-query', post=True, verify=True): + """Return the response obtained after sending a query via DNS-over-HTTPS. + + *client*, a ``httpx.AsyncClient``. If provided, the client to use for + the query. + + Unlike the other dnspython async functions, a backend cannot be provided + in this function because httpx always auto-detects the async backend. + + See :py:func:`dns.query.https()` for the documentation of the other + parameters, exceptions, and return type of this method. 
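+
+    A minimal sketch (the server address and query name are
+    illustrative only)::
+
+        q = dns.message.make_query('example.com.', dns.rdatatype.A)
+        r = await dns.asyncquery.https(q, '1.1.1.1')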
+ """ + + if not _have_httpx: + raise NoDOH('httpx is not available.') # pragma: no cover + + wire = q.to_wire() + try: + af = dns.inet.af_for_address(where) + except ValueError: + af = None + transport = None + headers = { + "accept": "application/dns-message" + } + if af is not None: + if af == socket.AF_INET: + url = 'https://{}:{}{}'.format(where, port, path) + elif af == socket.AF_INET6: + url = 'https://[{}]:{}{}'.format(where, port, path) + else: + url = where + if source is not None: + transport = httpx.AsyncHTTPTransport(local_address=source[0]) + + # After 3.6 is no longer supported, this can use an AsyncExitStack + client_to_close = None + try: + if not client: + client = httpx.AsyncClient(http1=True, http2=_have_http2, + verify=verify, transport=transport) + client_to_close = client + + # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH + # GET and POST examples + if post: + headers.update({ + "content-type": "application/dns-message", + "content-length": str(len(wire)) + }) + response = await client.post(url, headers=headers, content=wire, + timeout=timeout) + else: + wire = base64.urlsafe_b64encode(wire).rstrip(b"=") + wire = wire.decode() # httpx does a repr() if we give it bytes + response = await client.get(url, headers=headers, timeout=timeout, + params={"dns": wire}) + finally: + if client_to_close: + await client.aclose() + + # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH + # status codes + if response.status_code < 200 or response.status_code > 299: + raise ValueError('{} responded with status code {}' + '\nResponse body: {}'.format(where, + response.status_code, + response.content)) + r = dns.message.from_wire(response.content, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing) + r.time = response.elapsed + if not q.is_response(r): + raise BadResponse + return r + +async def inbound_xfr(where, txn_manager, query=None, + port=53, timeout=None, lifetime=None, source=None, + source_port=0, udp_mode=UDPMode.NEVER, backend=None): + """Conduct an inbound transfer and apply it via a transaction from the + txn_manager. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.inbound_xfr()` for the documentation of + the other parameters, exceptions, and return type of this method. 
+ """ + if query is None: + (query, serial) = dns.xfr.make_query(txn_manager) + else: + serial = dns.xfr.extract_serial_from_query(query) + rdtype = query.question[0].rdtype + is_ixfr = rdtype == dns.rdatatype.IXFR + origin = txn_manager.from_wire_origin() + wire = query.to_wire() + af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + (_, expiration) = _compute_times(lifetime) + retry = True + while retry: + retry = False + if is_ixfr and udp_mode != UDPMode.NEVER: + sock_type = socket.SOCK_DGRAM + is_udp = True + else: + sock_type = socket.SOCK_STREAM + is_udp = False + if not backend: + backend = dns.asyncbackend.get_default_backend() + s = await backend.make_socket(af, sock_type, 0, stuple, dtuple, + _timeout(expiration)) + async with s: + if is_udp: + await s.sendto(wire, dtuple, _timeout(expiration)) + else: + tcpmsg = struct.pack("!H", len(wire)) + wire + await s.sendall(tcpmsg, expiration) + with dns.xfr.Inbound(txn_manager, rdtype, serial, + is_udp) as inbound: + done = False + tsig_ctx = None + while not done: + (_, mexpiration) = _compute_times(timeout) + if mexpiration is None or \ + (expiration is not None and mexpiration > expiration): + mexpiration = expiration + if is_udp: + destination = _lltuple((where, port), af) + while True: + timeout = _timeout(mexpiration) + (rwire, from_address) = await s.recvfrom(65535, + timeout) + if _matches_destination(af, from_address, + destination, True): + break + else: + ldata = await _read_exactly(s, 2, mexpiration) + (l,) = struct.unpack("!H", ldata) + rwire = await _read_exactly(s, l, mexpiration) + is_ixfr = (rdtype == dns.rdatatype.IXFR) + r = dns.message.from_wire(rwire, keyring=query.keyring, + request_mac=query.mac, xfr=True, + origin=origin, tsig_ctx=tsig_ctx, + multi=(not is_udp), + one_rr_per_rrset=is_ixfr) + try: + done = inbound.process_message(r) + except dns.xfr.UseTCP: + assert is_udp # should not happen if we used TCP! + if udp_mode == UDPMode.ONLY: + raise + done = True + retry = True + udp_mode = UDPMode.NEVER + continue + tsig_ctx = r.tsig_ctx + if not retry and query.keyring and not r.had_tsig: + raise dns.exception.FormError("missing TSIG") diff --git a/venv/lib/python3.10/site-packages/dns/asyncquery.pyi b/venv/lib/python3.10/site-packages/dns/asyncquery.pyi new file mode 100644 index 0000000..a03434c --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/asyncquery.pyi @@ -0,0 +1,43 @@ +from typing import Optional, Union, Dict, Generator, Any +from . import tsig, rdatatype, rdataclass, name, message, asyncbackend + +# If the ssl import works, then +# +# error: Name 'ssl' already defined (by an import) +# +# is expected and can be ignored. 
+try: + import ssl +except ImportError: + class ssl: # type: ignore + SSLContext : Dict = {} + +async def udp(q : message.Message, where : str, + timeout : Optional[float] = None, port=53, + source : Optional[str] = None, source_port : Optional[int] = 0, + ignore_unexpected : Optional[bool] = False, + one_rr_per_rrset : Optional[bool] = False, + ignore_trailing : Optional[bool] = False, + sock : Optional[asyncbackend.DatagramSocket] = None, + backend : Optional[asyncbackend.Backend] = None) -> message.Message: + pass + +async def tcp(q : message.Message, where : str, timeout : float = None, port=53, + af : Optional[int] = None, source : Optional[str] = None, + source_port : Optional[int] = 0, + one_rr_per_rrset : Optional[bool] = False, + ignore_trailing : Optional[bool] = False, + sock : Optional[asyncbackend.StreamSocket] = None, + backend : Optional[asyncbackend.Backend] = None) -> message.Message: + pass + +async def tls(q : message.Message, where : str, + timeout : Optional[float] = None, port=53, + source : Optional[str] = None, source_port : Optional[int] = 0, + one_rr_per_rrset : Optional[bool] = False, + ignore_trailing : Optional[bool] = False, + sock : Optional[asyncbackend.StreamSocket] = None, + backend : Optional[asyncbackend.Backend] = None, + ssl_context: Optional[ssl.SSLContext] = None, + server_hostname: Optional[str] = None) -> message.Message: + pass diff --git a/venv/lib/python3.10/site-packages/dns/asyncresolver.py b/venv/lib/python3.10/site-packages/dns/asyncresolver.py new file mode 100644 index 0000000..ed29dee --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/asyncresolver.py @@ -0,0 +1,232 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Asynchronous DNS stub resolver.""" + +import time + +import dns.asyncbackend +import dns.asyncquery +import dns.exception +import dns.query +import dns.resolver + +# import some resolver symbols for brevity +from dns.resolver import NXDOMAIN, NoAnswer, NotAbsolute, NoRootSOA + + +# for indentation purposes below +_udp = dns.asyncquery.udp +_tcp = dns.asyncquery.tcp + + +class Resolver(dns.resolver.BaseResolver): + """Asynchronous DNS stub resolver.""" + + async def resolve(self, qname, rdtype=dns.rdatatype.A, + rdclass=dns.rdataclass.IN, + tcp=False, source=None, raise_on_no_answer=True, + source_port=0, lifetime=None, search=None, + backend=None): + """Query nameservers asynchronously to find the answer to the question. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.resolver.Resolver.resolve()` for the + documentation of the other parameters, exceptions, and return + type of this method. 
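+
+        A short usage sketch (the query name is illustrative)::
+
+            resolver = dns.asyncresolver.Resolver()
+            answer = await resolver.resolve('example.com.', 'A')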
+ """ + + resolution = dns.resolver._Resolution(self, qname, rdtype, rdclass, tcp, + raise_on_no_answer, search) + if not backend: + backend = dns.asyncbackend.get_default_backend() + start = time.time() + while True: + (request, answer) = resolution.next_request() + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + # cache hit! + return answer + done = False + while not done: + (nameserver, port, tcp, backoff) = resolution.next_nameserver() + if backoff: + await backend.sleep(backoff) + timeout = self._compute_timeout(start, lifetime, + resolution.errors) + try: + if dns.inet.is_address(nameserver): + if tcp: + response = await _tcp(request, nameserver, + timeout, port, + source, source_port, + backend=backend) + else: + response = await _udp(request, nameserver, + timeout, port, + source, source_port, + raise_on_truncation=True, + backend=backend) + else: + response = await dns.asyncquery.https(request, + nameserver, + timeout=timeout) + except Exception as ex: + (_, done) = resolution.query_result(None, ex) + continue + (answer, done) = resolution.query_result(response, None) + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + return answer + + async def resolve_address(self, ipaddr, *args, **kwargs): + """Use an asynchronous resolver to run a reverse query for PTR + records. + + This utilizes the resolve() method to perform a PTR lookup on the + specified IP address. + + *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get + the PTR record for. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + + """ + + return await self.resolve(dns.reversename.from_address(ipaddr), + rdtype=dns.rdatatype.PTR, + rdclass=dns.rdataclass.IN, + *args, **kwargs) + + # pylint: disable=redefined-outer-name + + async def canonical_name(self, name): + """Determine the canonical name of *name*. + + The canonical name is the name the resolver uses for queries + after all CNAME and DNAME renamings have been applied. + + *name*, a ``dns.name.Name`` or ``str``, the query name. + + This method can raise any exception that ``resolve()`` can + raise, other than ``dns.resolver.NoAnswer`` and + ``dns.resolver.NXDOMAIN``. + + Returns a ``dns.name.Name``. + """ + try: + answer = await self.resolve(name, raise_on_no_answer=False) + canonical_name = answer.canonical_name + except dns.resolver.NXDOMAIN as e: + canonical_name = e.canonical_name + return canonical_name + + +default_resolver = None + + +def get_default_resolver(): + """Get the default asynchronous resolver, initializing it if necessary.""" + if default_resolver is None: + reset_default_resolver() + return default_resolver + + +def reset_default_resolver(): + """Re-initialize default asynchronous resolver. + + Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX + systems) will be re-read immediately. 
+ """ + + global default_resolver + default_resolver = Resolver() + + +async def resolve(qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, + tcp=False, source=None, raise_on_no_answer=True, + source_port=0, lifetime=None, search=None, backend=None): + """Query nameservers asynchronously to find the answer to the question. + + This is a convenience function that uses the default resolver + object to make the query. + + See :py:func:`dns.asyncresolver.Resolver.resolve` for more + information on the parameters. + """ + + return await get_default_resolver().resolve(qname, rdtype, rdclass, tcp, + source, raise_on_no_answer, + source_port, lifetime, search, + backend) + + +async def resolve_address(ipaddr, *args, **kwargs): + """Use a resolver to run a reverse query for PTR records. + + See :py:func:`dns.asyncresolver.Resolver.resolve_address` for more + information on the parameters. + """ + + return await get_default_resolver().resolve_address(ipaddr, *args, **kwargs) + +async def canonical_name(name): + """Determine the canonical name of *name*. + + See :py:func:`dns.resolver.Resolver.canonical_name` for more + information on the parameters and possible exceptions. + """ + + return await get_default_resolver().canonical_name(name) + +async def zone_for_name(name, rdclass=dns.rdataclass.IN, tcp=False, + resolver=None, backend=None): + """Find the name of the zone which contains the specified name. + + See :py:func:`dns.resolver.Resolver.zone_for_name` for more + information on the parameters and possible exceptions. + """ + + if isinstance(name, str): + name = dns.name.from_text(name, dns.name.root) + if resolver is None: + resolver = get_default_resolver() + if not name.is_absolute(): + raise NotAbsolute(name) + while True: + try: + answer = await resolver.resolve(name, dns.rdatatype.SOA, rdclass, + tcp, backend=backend) + if answer.rrset.name == name: + return name + # otherwise we were CNAMEd or DNAMEd and need to look higher + except (NXDOMAIN, NoAnswer): + pass + try: + name = name.parent() + except dns.name.NoParent: # pragma: no cover + raise NoRootSOA diff --git a/venv/lib/python3.10/site-packages/dns/asyncresolver.pyi b/venv/lib/python3.10/site-packages/dns/asyncresolver.pyi new file mode 100644 index 0000000..92759d2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/asyncresolver.pyi @@ -0,0 +1,26 @@ +from typing import Union, Optional, List, Any, Dict +from . import exception, rdataclass, name, rdatatype, asyncbackend + +async def resolve(qname : str, rdtype : Union[int,str] = 0, + rdclass : Union[int,str] = 0, + tcp=False, source=None, raise_on_no_answer=True, + source_port=0, lifetime : Optional[float]=None, + search : Optional[bool]=None, + backend : Optional[asyncbackend.Backend]=None): + ... +async def resolve_address(self, ipaddr: str, + *args: Any, **kwargs: Optional[Dict]): + ... + +class Resolver: + def __init__(self, filename : Optional[str] = '/etc/resolv.conf', + configure : Optional[bool] = True): + self.nameservers : List[str] + async def resolve(self, qname : str, rdtype : Union[int,str] = rdatatype.A, + rdclass : Union[int,str] = rdataclass.IN, + tcp : bool = False, source : Optional[str] = None, + raise_on_no_answer=True, source_port : int = 0, + lifetime : Optional[float]=None, + search : Optional[bool]=None, + backend : Optional[asyncbackend.Backend]=None): + ... 
diff --git a/venv/lib/python3.10/site-packages/dns/dnssec.py b/venv/lib/python3.10/site-packages/dns/dnssec.py new file mode 100644 index 0000000..dee4e61 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/dnssec.py @@ -0,0 +1,594 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Common DNSSEC-related functions and constants.""" + +import hashlib +import struct +import time +import base64 + +import dns.enum +import dns.exception +import dns.name +import dns.node +import dns.rdataset +import dns.rdata +import dns.rdatatype +import dns.rdataclass + + +class UnsupportedAlgorithm(dns.exception.DNSException): + """The DNSSEC algorithm is not supported.""" + + +class ValidationFailure(dns.exception.DNSException): + """The DNSSEC signature is invalid.""" + + +class Algorithm(dns.enum.IntEnum): + RSAMD5 = 1 + DH = 2 + DSA = 3 + ECC = 4 + RSASHA1 = 5 + DSANSEC3SHA1 = 6 + RSASHA1NSEC3SHA1 = 7 + RSASHA256 = 8 + RSASHA512 = 10 + ECCGOST = 12 + ECDSAP256SHA256 = 13 + ECDSAP384SHA384 = 14 + ED25519 = 15 + ED448 = 16 + INDIRECT = 252 + PRIVATEDNS = 253 + PRIVATEOID = 254 + + @classmethod + def _maximum(cls): + return 255 + + +def algorithm_from_text(text): + """Convert text into a DNSSEC algorithm value. + + *text*, a ``str``, the text to convert to into an algorithm value. + + Returns an ``int``. + """ + + return Algorithm.from_text(text) + + +def algorithm_to_text(value): + """Convert a DNSSEC algorithm value to text + + *value*, an ``int`` a DNSSEC algorithm. + + Returns a ``str``, the name of a DNSSEC algorithm. + """ + + return Algorithm.to_text(value) + + +def key_id(key): + """Return the key id (a 16-bit number) for the specified key. + + *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` + + Returns an ``int`` between 0 and 65535 + """ + + rdata = key.to_wire() + if key.algorithm == Algorithm.RSAMD5: + return (rdata[-3] << 8) + rdata[-2] + else: + total = 0 + for i in range(len(rdata) // 2): + total += (rdata[2 * i] << 8) + \ + rdata[2 * i + 1] + if len(rdata) % 2 != 0: + total += rdata[len(rdata) - 1] << 8 + total += ((total >> 16) & 0xffff) + return total & 0xffff + +class DSDigest(dns.enum.IntEnum): + """DNSSEC Delegation Signer Digest Algorithm""" + + SHA1 = 1 + SHA256 = 2 + SHA384 = 4 + + @classmethod + def _maximum(cls): + return 255 + + +def make_ds(name, key, algorithm, origin=None): + """Create a DS record for a DNSSEC key. + + *name*, a ``dns.name.Name`` or ``str``, the owner name of the DS record. + + *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY``, the key the DS is about. + + *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. + The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case + does not matter for these strings. 
+ + *origin*, a ``dns.name.Name`` or ``None``. If `key` is a relative name, + then it will be made absolute using the specified origin. + + Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. + + Returns a ``dns.rdtypes.ANY.DS.DS`` + """ + + try: + if isinstance(algorithm, str): + algorithm = DSDigest[algorithm.upper()] + except Exception: + raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm) + + if algorithm == DSDigest.SHA1: + dshash = hashlib.sha1() + elif algorithm == DSDigest.SHA256: + dshash = hashlib.sha256() + elif algorithm == DSDigest.SHA384: + dshash = hashlib.sha384() + else: + raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm) + + if isinstance(name, str): + name = dns.name.from_text(name, origin) + dshash.update(name.canonicalize().to_wire()) + dshash.update(key.to_wire(origin=origin)) + digest = dshash.digest() + + dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, algorithm) + \ + digest + return dns.rdata.from_wire(dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0, + len(dsrdata)) + + +def _find_candidate_keys(keys, rrsig): + value = keys.get(rrsig.signer) + if isinstance(value, dns.node.Node): + rdataset = value.get_rdataset(dns.rdataclass.IN, dns.rdatatype.DNSKEY) + else: + rdataset = value + if rdataset is None: + return None + return [rd for rd in rdataset if + rd.algorithm == rrsig.algorithm and key_id(rd) == rrsig.key_tag] + + +def _is_rsa(algorithm): + return algorithm in (Algorithm.RSAMD5, Algorithm.RSASHA1, + Algorithm.RSASHA1NSEC3SHA1, Algorithm.RSASHA256, + Algorithm.RSASHA512) + + +def _is_dsa(algorithm): + return algorithm in (Algorithm.DSA, Algorithm.DSANSEC3SHA1) + + +def _is_ecdsa(algorithm): + return algorithm in (Algorithm.ECDSAP256SHA256, Algorithm.ECDSAP384SHA384) + + +def _is_eddsa(algorithm): + return algorithm in (Algorithm.ED25519, Algorithm.ED448) + + +def _is_gost(algorithm): + return algorithm == Algorithm.ECCGOST + + +def _is_md5(algorithm): + return algorithm == Algorithm.RSAMD5 + + +def _is_sha1(algorithm): + return algorithm in (Algorithm.DSA, Algorithm.RSASHA1, + Algorithm.DSANSEC3SHA1, Algorithm.RSASHA1NSEC3SHA1) + + +def _is_sha256(algorithm): + return algorithm in (Algorithm.RSASHA256, Algorithm.ECDSAP256SHA256) + + +def _is_sha384(algorithm): + return algorithm == Algorithm.ECDSAP384SHA384 + + +def _is_sha512(algorithm): + return algorithm == Algorithm.RSASHA512 + + +def _make_hash(algorithm): + if _is_md5(algorithm): + return hashes.MD5() + if _is_sha1(algorithm): + return hashes.SHA1() + if _is_sha256(algorithm): + return hashes.SHA256() + if _is_sha384(algorithm): + return hashes.SHA384() + if _is_sha512(algorithm): + return hashes.SHA512() + if algorithm == Algorithm.ED25519: + return hashes.SHA512() + if algorithm == Algorithm.ED448: + return hashes.SHAKE256(114) + + raise ValidationFailure('unknown hash for algorithm %u' % algorithm) + + +def _bytes_to_long(b): + return int.from_bytes(b, 'big') + + +def _validate_signature(sig, data, key, chosen_hash): + if _is_rsa(key.algorithm): + keyptr = key.key + (bytes_,) = struct.unpack('!B', keyptr[0:1]) + keyptr = keyptr[1:] + if bytes_ == 0: + (bytes_,) = struct.unpack('!H', keyptr[0:2]) + keyptr = keyptr[2:] + rsa_e = keyptr[0:bytes_] + rsa_n = keyptr[bytes_:] + try: + public_key = rsa.RSAPublicNumbers( + _bytes_to_long(rsa_e), + _bytes_to_long(rsa_n)).public_key(default_backend()) + except ValueError: + raise ValidationFailure('invalid public key') + public_key.verify(sig, data, padding.PKCS1v15(), chosen_hash) + elif 
_is_dsa(key.algorithm): + keyptr = key.key + (t,) = struct.unpack('!B', keyptr[0:1]) + keyptr = keyptr[1:] + octets = 64 + t * 8 + dsa_q = keyptr[0:20] + keyptr = keyptr[20:] + dsa_p = keyptr[0:octets] + keyptr = keyptr[octets:] + dsa_g = keyptr[0:octets] + keyptr = keyptr[octets:] + dsa_y = keyptr[0:octets] + try: + public_key = dsa.DSAPublicNumbers( + _bytes_to_long(dsa_y), + dsa.DSAParameterNumbers( + _bytes_to_long(dsa_p), + _bytes_to_long(dsa_q), + _bytes_to_long(dsa_g))).public_key(default_backend()) + except ValueError: + raise ValidationFailure('invalid public key') + public_key.verify(sig, data, chosen_hash) + elif _is_ecdsa(key.algorithm): + keyptr = key.key + if key.algorithm == Algorithm.ECDSAP256SHA256: + curve = ec.SECP256R1() + octets = 32 + else: + curve = ec.SECP384R1() + octets = 48 + ecdsa_x = keyptr[0:octets] + ecdsa_y = keyptr[octets:octets * 2] + try: + public_key = ec.EllipticCurvePublicNumbers( + curve=curve, + x=_bytes_to_long(ecdsa_x), + y=_bytes_to_long(ecdsa_y)).public_key(default_backend()) + except ValueError: + raise ValidationFailure('invalid public key') + public_key.verify(sig, data, ec.ECDSA(chosen_hash)) + elif _is_eddsa(key.algorithm): + keyptr = key.key + if key.algorithm == Algorithm.ED25519: + loader = ed25519.Ed25519PublicKey + else: + loader = ed448.Ed448PublicKey + try: + public_key = loader.from_public_bytes(keyptr) + except ValueError: + raise ValidationFailure('invalid public key') + public_key.verify(sig, data) + elif _is_gost(key.algorithm): + raise UnsupportedAlgorithm( + 'algorithm "%s" not supported by dnspython' % + algorithm_to_text(key.algorithm)) + else: + raise ValidationFailure('unknown algorithm %u' % key.algorithm) + + +def _validate_rrsig(rrset, rrsig, keys, origin=None, now=None): + """Validate an RRset against a single signature rdata, throwing an + exception if validation is not successful. + + *rrset*, the RRset to validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *rrsig*, a ``dns.rdata.Rdata``, the signature to validate. + + *keys*, the key dictionary, used to find the DNSKEY associated + with a given name. The dictionary is keyed by a + ``dns.name.Name``, and has ``dns.node.Node`` or + ``dns.rdataset.Rdataset`` values. + + *origin*, a ``dns.name.Name`` or ``None``, the origin to use for relative + names. + + *now*, an ``int`` or ``None``, the time, in seconds since the epoch, to + use as the current time when validating. If ``None``, the actual current + time is used. + + Raises ``ValidationFailure`` if the signature is expired, not yet valid, + the public key is invalid, the algorithm is unknown, the verification + fails, etc. + + Raises ``UnsupportedAlgorithm`` if the algorithm is recognized by + dnspython but not implemented. 
+ """ + + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root) + + candidate_keys = _find_candidate_keys(keys, rrsig) + if candidate_keys is None: + raise ValidationFailure('unknown key') + + # For convenience, allow the rrset to be specified as a (name, + # rdataset) tuple as well as a proper rrset + if isinstance(rrset, tuple): + rrname = rrset[0] + rdataset = rrset[1] + else: + rrname = rrset.name + rdataset = rrset + + if now is None: + now = time.time() + if rrsig.expiration < now: + raise ValidationFailure('expired') + if rrsig.inception > now: + raise ValidationFailure('not yet valid') + + if _is_dsa(rrsig.algorithm): + sig_r = rrsig.signature[1:21] + sig_s = rrsig.signature[21:] + sig = utils.encode_dss_signature(_bytes_to_long(sig_r), + _bytes_to_long(sig_s)) + elif _is_ecdsa(rrsig.algorithm): + if rrsig.algorithm == Algorithm.ECDSAP256SHA256: + octets = 32 + else: + octets = 48 + sig_r = rrsig.signature[0:octets] + sig_s = rrsig.signature[octets:] + sig = utils.encode_dss_signature(_bytes_to_long(sig_r), + _bytes_to_long(sig_s)) + else: + sig = rrsig.signature + + data = b'' + data += rrsig.to_wire(origin=origin)[:18] + data += rrsig.signer.to_digestable(origin) + + # Derelativize the name before considering labels. + rrname = rrname.derelativize(origin) + + if len(rrname) - 1 < rrsig.labels: + raise ValidationFailure('owner name longer than RRSIG labels') + elif rrsig.labels < len(rrname) - 1: + suffix = rrname.split(rrsig.labels + 1)[1] + rrname = dns.name.from_text('*', suffix) + rrnamebuf = rrname.to_digestable() + rrfixed = struct.pack('!HHI', rdataset.rdtype, rdataset.rdclass, + rrsig.original_ttl) + rdatas = [rdata.to_digestable(origin) for rdata in rdataset] + for rdata in sorted(rdatas): + data += rrnamebuf + data += rrfixed + rrlen = struct.pack('!H', len(rdata)) + data += rrlen + data += rdata + + chosen_hash = _make_hash(rrsig.algorithm) + + for candidate_key in candidate_keys: + try: + _validate_signature(sig, data, candidate_key, chosen_hash) + return + except (InvalidSignature, ValidationFailure): + # this happens on an individual validation failure + continue + # nothing verified -- raise failure: + raise ValidationFailure('verify failure') + + +def _validate(rrset, rrsigset, keys, origin=None, now=None): + """Validate an RRset against a signature RRset, throwing an exception + if none of the signatures validate. + + *rrset*, the RRset to validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *rrsigset*, the signature RRset. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *keys*, the key dictionary, used to find the DNSKEY associated + with a given name. The dictionary is keyed by a + ``dns.name.Name``, and has ``dns.node.Node`` or + ``dns.rdataset.Rdataset`` values. + + *origin*, a ``dns.name.Name``, the origin to use for relative names; + defaults to None. + + *now*, an ``int`` or ``None``, the time, in seconds since the epoch, to + use as the current time when validating. If ``None``, the actual current + time is used. + + Raises ``ValidationFailure`` if the signature is expired, not yet valid, + the public key is invalid, the algorithm is unknown, the verification + fails, etc. 
+ """ + + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root) + + if isinstance(rrset, tuple): + rrname = rrset[0] + else: + rrname = rrset.name + + if isinstance(rrsigset, tuple): + rrsigname = rrsigset[0] + rrsigrdataset = rrsigset[1] + else: + rrsigname = rrsigset.name + rrsigrdataset = rrsigset + + rrname = rrname.choose_relativity(origin) + rrsigname = rrsigname.choose_relativity(origin) + if rrname != rrsigname: + raise ValidationFailure("owner names do not match") + + for rrsig in rrsigrdataset: + try: + _validate_rrsig(rrset, rrsig, keys, origin, now) + return + except (ValidationFailure, UnsupportedAlgorithm): + pass + raise ValidationFailure("no RRSIGs validated") + + +class NSEC3Hash(dns.enum.IntEnum): + """NSEC3 hash algorithm""" + + SHA1 = 1 + + @classmethod + def _maximum(cls): + return 255 + +def nsec3_hash(domain, salt, iterations, algorithm): + """ + Calculate the NSEC3 hash, according to + https://tools.ietf.org/html/rfc5155#section-5 + + *domain*, a ``dns.name.Name`` or ``str``, the name to hash. + + *salt*, a ``str``, ``bytes``, or ``None``, the hash salt. If a + string, it is decoded as a hex string. + + *iterations*, an ``int``, the number of iterations. + + *algorithm*, a ``str`` or ``int``, the hash algorithm. + The only defined algorithm is SHA1. + + Returns a ``str``, the encoded NSEC3 hash. + """ + + b32_conversion = str.maketrans( + "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", "0123456789ABCDEFGHIJKLMNOPQRSTUV" + ) + + try: + if isinstance(algorithm, str): + algorithm = NSEC3Hash[algorithm.upper()] + except Exception: + raise ValueError("Wrong hash algorithm (only SHA1 is supported)") + + if algorithm != NSEC3Hash.SHA1: + raise ValueError("Wrong hash algorithm (only SHA1 is supported)") + + salt_encoded = salt + if salt is None: + salt_encoded = b'' + elif isinstance(salt, str): + if len(salt) % 2 == 0: + salt_encoded = bytes.fromhex(salt) + else: + raise ValueError("Invalid salt length") + + if not isinstance(domain, dns.name.Name): + domain = dns.name.from_text(domain) + domain_encoded = domain.canonicalize().to_wire() + + digest = hashlib.sha1(domain_encoded + salt_encoded).digest() + for _ in range(iterations): + digest = hashlib.sha1(digest + salt_encoded).digest() + + output = base64.b32encode(digest).decode("utf-8") + output = output.translate(b32_conversion) + + return output + + +def _need_pyca(*args, **kwargs): + raise ImportError("DNSSEC validation requires " + + "python cryptography") # pragma: no cover + + +try: + from cryptography.exceptions import InvalidSignature + from cryptography.hazmat.backends import default_backend + from cryptography.hazmat.primitives import hashes + from cryptography.hazmat.primitives.asymmetric import padding + from cryptography.hazmat.primitives.asymmetric import utils + from cryptography.hazmat.primitives.asymmetric import dsa + from cryptography.hazmat.primitives.asymmetric import ec + from cryptography.hazmat.primitives.asymmetric import ed25519 + from cryptography.hazmat.primitives.asymmetric import ed448 + from cryptography.hazmat.primitives.asymmetric import rsa +except ImportError: # pragma: no cover + validate = _need_pyca + validate_rrsig = _need_pyca + _have_pyca = False +else: + validate = _validate # type: ignore + validate_rrsig = _validate_rrsig # type: ignore + _have_pyca = True + +### BEGIN generated Algorithm constants + +RSAMD5 = Algorithm.RSAMD5 +DH = Algorithm.DH +DSA = Algorithm.DSA +ECC = Algorithm.ECC +RSASHA1 = Algorithm.RSASHA1 +DSANSEC3SHA1 = Algorithm.DSANSEC3SHA1 
+RSASHA1NSEC3SHA1 = Algorithm.RSASHA1NSEC3SHA1 +RSASHA256 = Algorithm.RSASHA256 +RSASHA512 = Algorithm.RSASHA512 +ECCGOST = Algorithm.ECCGOST +ECDSAP256SHA256 = Algorithm.ECDSAP256SHA256 +ECDSAP384SHA384 = Algorithm.ECDSAP384SHA384 +ED25519 = Algorithm.ED25519 +ED448 = Algorithm.ED448 +INDIRECT = Algorithm.INDIRECT +PRIVATEDNS = Algorithm.PRIVATEDNS +PRIVATEOID = Algorithm.PRIVATEOID + +### END generated Algorithm constants diff --git a/venv/lib/python3.10/site-packages/dns/dnssec.pyi b/venv/lib/python3.10/site-packages/dns/dnssec.pyi new file mode 100644 index 0000000..e126f9b --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/dnssec.pyi @@ -0,0 +1,21 @@ +from typing import Union, Dict, Tuple, Optional +from . import rdataset, rrset, exception, name, rdtypes, rdata, node +import dns.rdtypes.ANY.DS as DS +import dns.rdtypes.ANY.DNSKEY as DNSKEY + +_have_pyca : bool + +def validate_rrsig(rrset : Union[Tuple[name.Name, rdataset.Rdataset], rrset.RRset], rrsig : rdata.Rdata, keys : Dict[name.Name, Union[node.Node, rdataset.Rdataset]], origin : Optional[name.Name] = None, now : Optional[int] = None) -> None: + ... + +def validate(rrset: Union[Tuple[name.Name, rdataset.Rdataset], rrset.RRset], rrsigset : Union[Tuple[name.Name, rdataset.Rdataset], rrset.RRset], keys : Dict[name.Name, Union[node.Node, rdataset.Rdataset]], origin=None, now=None) -> None: + ... + +class ValidationFailure(exception.DNSException): + ... + +def make_ds(name : name.Name, key : DNSKEY.DNSKEY, algorithm : str, origin : Optional[name.Name] = None) -> DS.DS: + ... + +def nsec3_hash(domain: str, salt: Optional[Union[str, bytes]], iterations: int, algo: int) -> str: + ... diff --git a/venv/lib/python3.10/site-packages/dns/e164.py b/venv/lib/python3.10/site-packages/dns/e164.py new file mode 100644 index 0000000..83731b2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/e164.py @@ -0,0 +1,104 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS E.164 helpers.""" + +import dns.exception +import dns.name +import dns.resolver + +#: The public E.164 domain. +public_enum_domain = dns.name.from_text('e164.arpa.') + + +def from_e164(text, origin=public_enum_domain): + """Convert an E.164 number in textual form into a Name object whose + value is the ENUM domain name for that number. + + Non-digits in the text are ignored, i.e. "16505551212", + "+1.650.555.1212" and "1 (650) 555-1212" are all the same. + + *text*, a ``str``, is an E.164 number in textual form. + + *origin*, a ``dns.name.Name``, the domain in which the number + should be constructed. The default is ``e164.arpa.``. + + Returns a ``dns.name.Name``. 
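+
+    For example (the number is illustrative)::
+
+        name = dns.e164.from_e164('+1 (650) 555-1212')
+        # name is the ENUM name 2.1.2.1.5.5.5.0.5.6.1.e164.arpa.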
+ """ + + parts = [d for d in text if d.isdigit()] + parts.reverse() + return dns.name.from_text('.'.join(parts), origin=origin) + + +def to_e164(name, origin=public_enum_domain, want_plus_prefix=True): + """Convert an ENUM domain name into an E.164 number. + + Note that dnspython does not have any information about preferred + number formats within national numbering plans, so all numbers are + emitted as a simple string of digits, prefixed by a '+' (unless + *want_plus_prefix* is ``False``). + + *name* is a ``dns.name.Name``, the ENUM domain name. + + *origin* is a ``dns.name.Name``, a domain containing the ENUM + domain name. The name is relativized to this domain before being + converted to text. If ``None``, no relativization is done. + + *want_plus_prefix* is a ``bool``. If True, add a '+' to the beginning of + the returned number. + + Returns a ``str``. + + """ + if origin is not None: + name = name.relativize(origin) + dlabels = [d for d in name.labels if d.isdigit() and len(d) == 1] + if len(dlabels) != len(name.labels): + raise dns.exception.SyntaxError('non-digit labels in ENUM domain name') + dlabels.reverse() + text = b''.join(dlabels) + if want_plus_prefix: + text = b'+' + text + return text.decode() + + +def query(number, domains, resolver=None): + """Look for NAPTR RRs for the specified number in the specified domains. + + e.g. lookup('16505551212', ['e164.dnspython.org.', 'e164.arpa.']) + + *number*, a ``str`` is the number to look for. + + *domains* is an iterable containing ``dns.name.Name`` values. + + *resolver*, a ``dns.resolver.Resolver``, is the resolver to use. If + ``None``, the default resolver is used. + """ + + if resolver is None: + resolver = dns.resolver.get_default_resolver() + e_nx = dns.resolver.NXDOMAIN() + for domain in domains: + if isinstance(domain, str): + domain = dns.name.from_text(domain) + qname = dns.e164.from_e164(number, domain) + try: + return resolver.resolve(qname, 'NAPTR') + except dns.resolver.NXDOMAIN as e: + e_nx += e + raise e_nx diff --git a/venv/lib/python3.10/site-packages/dns/e164.pyi b/venv/lib/python3.10/site-packages/dns/e164.pyi new file mode 100644 index 0000000..37a99fe --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/e164.pyi @@ -0,0 +1,10 @@ +from typing import Optional, Iterable +from . import name, resolver +def from_e164(text : str, origin=name.Name(".")) -> name.Name: + ... + +def to_e164(name : name.Name, origin : Optional[name.Name] = None, want_plus_prefix=True) -> str: + ... + +def query(number : str, domains : Iterable[str], resolver : Optional[resolver.Resolver] = None) -> resolver.Answer: + ... diff --git a/venv/lib/python3.10/site-packages/dns/edns.py b/venv/lib/python3.10/site-packages/dns/edns.py new file mode 100644 index 0000000..9d7e909 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/edns.py @@ -0,0 +1,464 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""EDNS Options"""
+
+import math
+import socket
+import struct
+
+import dns.enum
+import dns.inet
+import dns.ipv4
+import dns.ipv6
+import dns.rdata
+import dns.wire
+
+
+class OptionType(dns.enum.IntEnum):
+    #: NSID
+    NSID = 3
+    #: DAU
+    DAU = 5
+    #: DHU
+    DHU = 6
+    #: N3U
+    N3U = 7
+    #: ECS (client-subnet)
+    ECS = 8
+    #: EXPIRE
+    EXPIRE = 9
+    #: COOKIE
+    COOKIE = 10
+    #: KEEPALIVE
+    KEEPALIVE = 11
+    #: PADDING
+    PADDING = 12
+    #: CHAIN
+    CHAIN = 13
+    #: EDE (extended-dns-error)
+    EDE = 15
+
+    @classmethod
+    def _maximum(cls):
+        return 65535
+
+
+class Option:
+
+    """Base class for all EDNS option types."""
+
+    def __init__(self, otype):
+        """Initialize an option.
+
+        *otype*, an ``int``, is the option type.
+        """
+        self.otype = OptionType.make(otype)
+
+    def to_wire(self, file=None):
+        """Convert an option to wire format.
+
+        Returns a ``bytes`` or ``None``.
+
+        """
+        raise NotImplementedError  # pragma: no cover
+
+    @classmethod
+    def from_wire_parser(cls, otype, parser):
+        """Build an EDNS option object from wire format.
+
+        *otype*, an ``int``, is the option type.
+
+        *parser*, a ``dns.wire.Parser``, the parser, which should be
+        restricted to the option length.
+
+        Returns a ``dns.edns.Option``.
+        """
+        raise NotImplementedError  # pragma: no cover
+
+    def _cmp(self, other):
+        """Compare an EDNS option with another option of the same type.
+
+        Returns < 0 if < *other*, 0 if == *other*, and > 0 if > *other*.
+        """
+        wire = self.to_wire()
+        owire = other.to_wire()
+        if wire == owire:
+            return 0
+        if wire > owire:
+            return 1
+        return -1
+
+    def __eq__(self, other):
+        if not isinstance(other, Option):
+            return False
+        if self.otype != other.otype:
+            return False
+        return self._cmp(other) == 0
+
+    def __ne__(self, other):
+        if not isinstance(other, Option):
+            return True
+        if self.otype != other.otype:
+            return True
+        return self._cmp(other) != 0
+
+    def __lt__(self, other):
+        if not isinstance(other, Option) or \
+           self.otype != other.otype:
+            return NotImplemented
+        return self._cmp(other) < 0
+
+    def __le__(self, other):
+        if not isinstance(other, Option) or \
+           self.otype != other.otype:
+            return NotImplemented
+        return self._cmp(other) <= 0
+
+    def __ge__(self, other):
+        if not isinstance(other, Option) or \
+           self.otype != other.otype:
+            return NotImplemented
+        return self._cmp(other) >= 0
+
+    def __gt__(self, other):
+        if not isinstance(other, Option) or \
+           self.otype != other.otype:
+            return NotImplemented
+        return self._cmp(other) > 0
+
+    def __str__(self):
+        return self.to_text()
+
+
+class GenericOption(Option):
+
+    """Generic Option Class
+
+    This class is used for EDNS option types for which we have no better
+    implementation.
+    """
+
+    def __init__(self, otype, data):
+        super().__init__(otype)
+        self.data = dns.rdata.Rdata._as_bytes(data, True)
+
+    def to_wire(self, file=None):
+        if file:
+            file.write(self.data)
+        else:
+            return self.data
+
+    def to_text(self):
+        return "Generic %d" % self.otype
+
+    @classmethod
+    def from_wire_parser(cls, otype, parser):
+        return cls(otype, parser.get_remaining())
+
+
+class ECSOption(Option):
+    """EDNS Client Subnet (ECS, RFC7871)"""
+
+    def __init__(self, address, srclen=None, scopelen=0):
+        """*address*, a ``str``, is the client address information.
+ + *srclen*, an ``int``, the source prefix length, which is the + leftmost number of bits of the address to be used for the + lookup. The default is 24 for IPv4 and 56 for IPv6. + + *scopelen*, an ``int``, the scope prefix length. This value + must be 0 in queries, and should be set in responses. + """ + + super().__init__(OptionType.ECS) + af = dns.inet.af_for_address(address) + + if af == socket.AF_INET6: + self.family = 2 + if srclen is None: + srclen = 56 + address = dns.rdata.Rdata._as_ipv6_address(address) + srclen = dns.rdata.Rdata._as_int(srclen, 0, 128) + scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 128) + elif af == socket.AF_INET: + self.family = 1 + if srclen is None: + srclen = 24 + address = dns.rdata.Rdata._as_ipv4_address(address) + srclen = dns.rdata.Rdata._as_int(srclen, 0, 32) + scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 32) + else: # pragma: no cover (this will never happen) + raise ValueError('Bad address family') + + self.address = address + self.srclen = srclen + self.scopelen = scopelen + + addrdata = dns.inet.inet_pton(af, address) + nbytes = int(math.ceil(srclen / 8.0)) + + # Truncate to srclen and pad to the end of the last octet needed + # See RFC section 6 + self.addrdata = addrdata[:nbytes] + nbits = srclen % 8 + if nbits != 0: + last = struct.pack('B', + ord(self.addrdata[-1:]) & (0xff << (8 - nbits))) + self.addrdata = self.addrdata[:-1] + last + + def to_text(self): + return "ECS {}/{} scope/{}".format(self.address, self.srclen, + self.scopelen) + + @staticmethod + def from_text(text): + """Convert a string into a `dns.edns.ECSOption` + + *text*, a `str`, the text form of the option. + + Returns a `dns.edns.ECSOption`. + + Examples: + + >>> import dns.edns + >>> + >>> # basic example + >>> dns.edns.ECSOption.from_text('1.2.3.4/24') + >>> + >>> # also understands scope + >>> dns.edns.ECSOption.from_text('1.2.3.4/24/32') + >>> + >>> # IPv6 + >>> dns.edns.ECSOption.from_text('2001:4b98::1/64/64') + >>> + >>> # it understands results from `dns.edns.ECSOption.to_text()` + >>> dns.edns.ECSOption.from_text('ECS 1.2.3.4/24/32') + """ + optional_prefix = 'ECS' + tokens = text.split() + ecs_text = None + if len(tokens) == 1: + ecs_text = tokens[0] + elif len(tokens) == 2: + if tokens[0] != optional_prefix: + raise ValueError('could not parse ECS from "{}"'.format(text)) + ecs_text = tokens[1] + else: + raise ValueError('could not parse ECS from "{}"'.format(text)) + n_slashes = ecs_text.count('/') + if n_slashes == 1: + address, srclen = ecs_text.split('/') + scope = 0 + elif n_slashes == 2: + address, srclen, scope = ecs_text.split('/') + else: + raise ValueError('could not parse ECS from "{}"'.format(text)) + try: + scope = int(scope) + except ValueError: + raise ValueError('invalid scope ' + + '"{}": scope must be an integer'.format(scope)) + try: + srclen = int(srclen) + except ValueError: + raise ValueError('invalid srclen ' + + '"{}": srclen must be an integer'.format(srclen)) + return ECSOption(address, srclen, scope) + + def to_wire(self, file=None): + value = (struct.pack('!HBB', self.family, self.srclen, self.scopelen) + + self.addrdata) + if file: + file.write(value) + else: + return value + + @classmethod + def from_wire_parser(cls, otype, parser): + family, src, scope = parser.get_struct('!HBB') + addrlen = int(math.ceil(src / 8.0)) + prefix = parser.get_bytes(addrlen) + if family == 1: + pad = 4 - addrlen + addr = dns.ipv4.inet_ntoa(prefix + b'\x00' * pad) + elif family == 2: + pad = 16 - addrlen + addr = dns.ipv6.inet_ntoa(prefix + b'\x00' * 
pad)
+        else:
+            raise ValueError('unsupported family')
+
+        return cls(addr, src, scope)
+
+
+class EDECode(dns.enum.IntEnum):
+    OTHER = 0
+    UNSUPPORTED_DNSKEY_ALGORITHM = 1
+    UNSUPPORTED_DS_DIGEST_TYPE = 2
+    STALE_ANSWER = 3
+    FORGED_ANSWER = 4
+    DNSSEC_INDETERMINATE = 5
+    DNSSEC_BOGUS = 6
+    SIGNATURE_EXPIRED = 7
+    SIGNATURE_NOT_YET_VALID = 8
+    DNSKEY_MISSING = 9
+    RRSIGS_MISSING = 10
+    NO_ZONE_KEY_BIT_SET = 11
+    NSEC_MISSING = 12
+    CACHED_ERROR = 13
+    NOT_READY = 14
+    BLOCKED = 15
+    CENSORED = 16
+    FILTERED = 17
+    PROHIBITED = 18
+    STALE_NXDOMAIN_ANSWER = 19
+    NOT_AUTHORITATIVE = 20
+    NOT_SUPPORTED = 21
+    NO_REACHABLE_AUTHORITY = 22
+    NETWORK_ERROR = 23
+    INVALID_DATA = 24
+
+    @classmethod
+    def _maximum(cls):
+        return 65535
+
+
+class EDEOption(Option):
+    """Extended DNS Error (EDE, RFC8914)"""
+
+    def __init__(self, code, text=None):
+        """*code*, a ``dns.edns.EDECode`` or ``str``, the info code of the
+        extended error.
+
+        *text*, a ``str`` or ``None``, specifying additional information about
+        the error.
+        """
+
+        super().__init__(OptionType.EDE)
+
+        if text is not None and not isinstance(text, str):
+            raise ValueError('text must be string or None')
+
+        self.code = EDECode.make(code)
+        self.text = text
+
+    def to_text(self):
+        output = f'EDE {self.code}'
+        if self.text is not None:
+            output += f': {self.text}'
+        return output
+
+    def to_wire(self, file=None):
+        value = struct.pack('!H', self.code)
+        if self.text is not None:
+            value += self.text.encode('utf8')
+
+        if file:
+            file.write(value)
+        else:
+            return value
+
+    @classmethod
+    def from_wire_parser(cls, otype, parser):
+        code = parser.get_uint16()
+        text = parser.get_remaining()
+
+        if text:
+            if text[-1] == 0:  # text MAY be null-terminated
+                text = text[:-1]
+            text = text.decode('utf8')
+        else:
+            text = None
+
+        return cls(code, text)
+
+
+_type_to_class = {
+    OptionType.ECS: ECSOption,
+    OptionType.EDE: EDEOption,
+}
+
+
+def get_option_class(otype):
+    """Return the class for the specified option type.
+
+    The GenericOption class is used if a more specific class is not
+    known.
+    """
+
+    cls = _type_to_class.get(otype)
+    if cls is None:
+        cls = GenericOption
+    return cls
+
+
+def option_from_wire_parser(otype, parser):
+    """Build an EDNS option object from wire format.
+
+    *otype*, an ``int``, is the option type.
+
+    *parser*, a ``dns.wire.Parser``, the parser, which should be
+    restricted to the option length.
+
+    Returns an instance of a subclass of ``dns.edns.Option``.
+    """
+    cls = get_option_class(otype)
+    otype = OptionType.make(otype)
+    return cls.from_wire_parser(otype, parser)
+
+
+def option_from_wire(otype, wire, current, olen):
+    """Build an EDNS option object from wire format.
+
+    *otype*, an ``int``, is the option type.
+
+    *wire*, a ``bytes``, is the wire-format message.
+
+    *current*, an ``int``, is the offset in *wire* of the beginning
+    of the rdata.
+
+    *olen*, an ``int``, is the length of the wire-format option data.
+
+    Returns an instance of a subclass of ``dns.edns.Option``.
+    """
+    parser = dns.wire.Parser(wire, current)
+    with parser.restrict_to(olen):
+        return option_from_wire_parser(otype, parser)
+
+def register_type(implementation, otype):
+    """Register the implementation of an option type.
+
+    *implementation*, a ``class``, is a subclass of ``dns.edns.Option``.
+
+    *otype*, an ``int``, is the option type.
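+
+    Example (an illustrative sketch; ``MyOption`` is a hypothetical
+    subclass and 65001 is a code from the local/experimental range):
+
+    >>> import dns.edns
+    >>> class MyOption(dns.edns.Option):
+    ...     def __init__(self, data):
+    ...         super().__init__(65001)
+    ...         self.data = data
+    >>> dns.edns.register_type(MyOption, 65001)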
+ """ + + _type_to_class[otype] = implementation + +### BEGIN generated OptionType constants + +NSID = OptionType.NSID +DAU = OptionType.DAU +DHU = OptionType.DHU +N3U = OptionType.N3U +ECS = OptionType.ECS +EXPIRE = OptionType.EXPIRE +COOKIE = OptionType.COOKIE +KEEPALIVE = OptionType.KEEPALIVE +PADDING = OptionType.PADDING +CHAIN = OptionType.CHAIN +EDE = OptionType.EDE + +### END generated OptionType constants diff --git a/venv/lib/python3.10/site-packages/dns/entropy.py b/venv/lib/python3.10/site-packages/dns/entropy.py new file mode 100644 index 0000000..086bba7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/entropy.py @@ -0,0 +1,129 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import os +import hashlib +import random +import time +try: + import threading as _threading +except ImportError: # pragma: no cover + import dummy_threading as _threading # type: ignore + + +class EntropyPool: + + # This is an entropy pool for Python implementations that do not + # have a working SystemRandom. I'm not sure there are any, but + # leaving this code doesn't hurt anything as the library code + # is used if present. 
+ + def __init__(self, seed=None): + self.pool_index = 0 + self.digest = None + self.next_byte = 0 + self.lock = _threading.Lock() + self.hash = hashlib.sha1() + self.hash_len = 20 + self.pool = bytearray(b'\0' * self.hash_len) + if seed is not None: + self._stir(bytearray(seed)) + self.seeded = True + self.seed_pid = os.getpid() + else: + self.seeded = False + self.seed_pid = 0 + + def _stir(self, entropy): + for c in entropy: + if self.pool_index == self.hash_len: + self.pool_index = 0 + b = c & 0xff + self.pool[self.pool_index] ^= b + self.pool_index += 1 + + def stir(self, entropy): + with self.lock: + self._stir(entropy) + + def _maybe_seed(self): + if not self.seeded or self.seed_pid != os.getpid(): + try: + seed = os.urandom(16) + except Exception: # pragma: no cover + try: + with open('/dev/urandom', 'rb', 0) as r: + seed = r.read(16) + except Exception: + seed = str(time.time()) + self.seeded = True + self.seed_pid = os.getpid() + self.digest = None + seed = bytearray(seed) + self._stir(seed) + + def random_8(self): + with self.lock: + self._maybe_seed() + if self.digest is None or self.next_byte == self.hash_len: + self.hash.update(bytes(self.pool)) + self.digest = bytearray(self.hash.digest()) + self._stir(self.digest) + self.next_byte = 0 + value = self.digest[self.next_byte] + self.next_byte += 1 + return value + + def random_16(self): + return self.random_8() * 256 + self.random_8() + + def random_32(self): + return self.random_16() * 65536 + self.random_16() + + def random_between(self, first, last): + size = last - first + 1 + if size > 4294967296: + raise ValueError('too big') + if size > 65536: + rand = self.random_32 + max = 4294967295 + elif size > 256: + rand = self.random_16 + max = 65535 + else: + rand = self.random_8 + max = 255 + return first + size * rand() // (max + 1) + +pool = EntropyPool() + +try: + system_random = random.SystemRandom() +except Exception: # pragma: no cover + system_random = None + +def random_16(): + if system_random is not None: + return system_random.randrange(0, 65536) + else: + return pool.random_16() + +def between(first, last): + if system_random is not None: + return system_random.randrange(first, last + 1) + else: + return pool.random_between(first, last) diff --git a/venv/lib/python3.10/site-packages/dns/entropy.pyi b/venv/lib/python3.10/site-packages/dns/entropy.pyi new file mode 100644 index 0000000..818f805 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/entropy.pyi @@ -0,0 +1,10 @@ +from typing import Optional +from random import SystemRandom + +system_random : Optional[SystemRandom] + +def random_16() -> int: + pass + +def between(first: int, last: int) -> int: + pass diff --git a/venv/lib/python3.10/site-packages/dns/enum.py b/venv/lib/python3.10/site-packages/dns/enum.py new file mode 100644 index 0000000..b822dd5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/enum.py @@ -0,0 +1,90 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import enum + +class IntEnum(enum.IntEnum): + @classmethod + def _check_value(cls, value): + max = cls._maximum() + if value < 0 or value > max: + name = cls._short_name() + raise ValueError(f"{name} must be between >= 0 and <= {max}") + + @classmethod + def from_text(cls, text): + text = text.upper() + try: + return cls[text] + except KeyError: + pass + prefix = cls._prefix() + if text.startswith(prefix) and text[len(prefix):].isdigit(): + value = int(text[len(prefix):]) + cls._check_value(value) + try: + return cls(value) + except ValueError: + return value + raise cls._unknown_exception_class() + + @classmethod + def to_text(cls, value): + cls._check_value(value) + try: + return cls(value).name + except ValueError: + return f"{cls._prefix()}{value}" + + @classmethod + def make(cls, value): + """Convert text or a value into an enumerated type, if possible. + + *value*, the ``int`` or ``str`` to convert. + + Raises a class-specific exception if a ``str`` is provided that + cannot be converted. + + Raises ``ValueError`` if the value is out of range. + + Returns an enumeration from the calling class corresponding to the + value, if one is defined, or an ``int`` otherwise. + """ + + if isinstance(value, str): + return cls.from_text(value) + cls._check_value(value) + try: + return cls(value) + except ValueError: + return value + + @classmethod + def _maximum(cls): + raise NotImplementedError # pragma: no cover + + @classmethod + def _short_name(cls): + return cls.__name__.lower() + + @classmethod + def _prefix(cls): + return '' + + @classmethod + def _unknown_exception_class(cls): + return ValueError diff --git a/venv/lib/python3.10/site-packages/dns/exception.py b/venv/lib/python3.10/site-packages/dns/exception.py new file mode 100644 index 0000000..0839382 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/exception.py @@ -0,0 +1,142 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Common DNS Exceptions. + +Dnspython modules may also define their own exceptions, which will +always be subclasses of ``DNSException``. +""" + +class DNSException(Exception): + """Abstract base class shared by all dnspython exceptions. + + It supports two basic modes of operation: + + a) Old/compatible mode is used if ``__init__`` was called with + empty *kwargs*. 
In compatible mode all *args* are passed + to the standard Python Exception class as before and all *args* are + printed by the standard ``__str__`` implementation. Class variable + ``msg`` (or doc string if ``msg`` is ``None``) is returned from ``str()`` + if *args* is empty. + + b) New/parametrized mode is used if ``__init__`` was called with + non-empty *kwargs*. + In the new mode *args* must be empty and all kwargs must match + those set in class variable ``supp_kwargs``. All kwargs are stored inside + ``self.kwargs`` and used in a new ``__str__`` implementation to construct + a formatted message based on the ``fmt`` class variable, a ``string``. + + In the simplest case it is enough to override the ``supp_kwargs`` + and ``fmt`` class variables to get nice parametrized messages. + """ + + msg = None # non-parametrized message + supp_kwargs = set() # accepted parameters for _fmt_kwargs (sanity check) + fmt = None # message parametrized with results from _fmt_kwargs + + def __init__(self, *args, **kwargs): + self._check_params(*args, **kwargs) + if kwargs: + self.kwargs = self._check_kwargs(**kwargs) + self.msg = str(self) + else: + self.kwargs = dict() # defined but empty for old mode exceptions + if self.msg is None: + # doc string is better implicit message than empty string + self.msg = self.__doc__ + if args: + super().__init__(*args) + else: + super().__init__(self.msg) + + def _check_params(self, *args, **kwargs): + """Old exceptions supported only args and not kwargs. + + For sanity we do not allow to mix old and new behavior.""" + if args or kwargs: + assert bool(args) != bool(kwargs), \ + 'keyword arguments are mutually exclusive with positional args' + + def _check_kwargs(self, **kwargs): + if kwargs: + assert set(kwargs.keys()) == self.supp_kwargs, \ + 'following set of keyword args is required: %s' % ( + self.supp_kwargs) + return kwargs + + def _fmt_kwargs(self, **kwargs): + """Format kwargs before printing them. + + Resulting dictionary has to have keys necessary for str.format call + on fmt class variable. 
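+
+        For example (illustrative), ``Timeout`` below defines
+        ``supp_kwargs = {'timeout'}`` and an ``fmt`` template, so:
+
+        >>> import dns.exception
+        >>> str(dns.exception.Timeout(timeout=2.5))
+        'The DNS operation timed out after 2.500 seconds'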
+ """ + fmtargs = {} + for kw, data in kwargs.items(): + if isinstance(data, (list, set)): + # convert list of to list of str() + fmtargs[kw] = list(map(str, data)) + if len(fmtargs[kw]) == 1: + # remove list brackets [] from single-item lists + fmtargs[kw] = fmtargs[kw].pop() + else: + fmtargs[kw] = data + return fmtargs + + def __str__(self): + if self.kwargs and self.fmt: + # provide custom message constructed from keyword arguments + fmtargs = self._fmt_kwargs(**self.kwargs) + return self.fmt.format(**fmtargs) + else: + # print *args directly in the same way as old DNSException + return super().__str__() + + +class FormError(DNSException): + """DNS message is malformed.""" + + +class SyntaxError(DNSException): + """Text input is malformed.""" + + +class UnexpectedEnd(SyntaxError): + """Text input ended unexpectedly.""" + + +class TooBig(DNSException): + """The DNS message is too big.""" + + +class Timeout(DNSException): + """The DNS operation timed out.""" + supp_kwargs = {'timeout'} + fmt = "The DNS operation timed out after {timeout:.3f} seconds" + + +class ExceptionWrapper: + def __init__(self, exception_class): + self.exception_class = exception_class + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is not None and not isinstance(exc_val, + self.exception_class): + raise self.exception_class(str(exc_val)) from exc_val + return False diff --git a/venv/lib/python3.10/site-packages/dns/exception.pyi b/venv/lib/python3.10/site-packages/dns/exception.pyi new file mode 100644 index 0000000..dc57126 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/exception.pyi @@ -0,0 +1,12 @@ +from typing import Set, Optional, Dict + +class DNSException(Exception): + supp_kwargs : Set[str] + kwargs : Optional[Dict] + fmt : Optional[str] + +class SyntaxError(DNSException): ... +class FormError(DNSException): ... +class Timeout(DNSException): ... +class TooBig(DNSException): ... +class UnexpectedEnd(SyntaxError): ... diff --git a/venv/lib/python3.10/site-packages/dns/flags.py b/venv/lib/python3.10/site-packages/dns/flags.py new file mode 100644 index 0000000..9652287 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/flags.py @@ -0,0 +1,119 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""DNS Message Flags.""" + +import enum + +# Standard DNS flags + +class Flag(enum.IntFlag): + #: Query Response + QR = 0x8000 + #: Authoritative Answer + AA = 0x0400 + #: Truncated Response + TC = 0x0200 + #: Recursion Desired + RD = 0x0100 + #: Recursion Available + RA = 0x0080 + #: Authentic Data + AD = 0x0020 + #: Checking Disabled + CD = 0x0010 + + +# EDNS flags + +class EDNSFlag(enum.IntFlag): + #: DNSSEC answer OK + DO = 0x8000 + + +def _from_text(text, enum_class): + flags = 0 + tokens = text.split() + for t in tokens: + flags |= enum_class[t.upper()] + return flags + + +def _to_text(flags, enum_class): + text_flags = [] + for k, v in enum_class.__members__.items(): + if flags & v != 0: + text_flags.append(k) + return ' '.join(text_flags) + + +def from_text(text): + """Convert a space-separated list of flag text values into a flags + value. + + Returns an ``int`` + """ + + return _from_text(text, Flag) + + +def to_text(flags): + """Convert a flags value into a space-separated list of flag text + values. + + Returns a ``str``. + """ + + return _to_text(flags, Flag) + + +def edns_from_text(text): + """Convert a space-separated list of EDNS flag text values into a EDNS + flags value. + + Returns an ``int`` + """ + + return _from_text(text, EDNSFlag) + + +def edns_to_text(flags): + """Convert an EDNS flags value into a space-separated list of EDNS flag + text values. + + Returns a ``str``. + """ + + return _to_text(flags, EDNSFlag) + +### BEGIN generated Flag constants + +QR = Flag.QR +AA = Flag.AA +TC = Flag.TC +RD = Flag.RD +RA = Flag.RA +AD = Flag.AD +CD = Flag.CD + +### END generated Flag constants + +### BEGIN generated EDNSFlag constants + +DO = EDNSFlag.DO + +### END generated EDNSFlag constants diff --git a/venv/lib/python3.10/site-packages/dns/grange.py b/venv/lib/python3.10/site-packages/dns/grange.py new file mode 100644 index 0000000..112ede4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/grange.py @@ -0,0 +1,69 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2012-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS GENERATE range conversion.""" + +import dns + +def from_text(text): + """Convert the text form of a range in a ``$GENERATE`` statement to an + integer. + + *text*, a ``str``, the textual range in ``$GENERATE`` form. + + Returns a tuple of three ``int`` values ``(start, stop, step)``. 
+ """ + + start = -1 + stop = -1 + step = 1 + cur = '' + state = 0 + # state 0 1 2 + # x - y / z + + if text and text[0] == '-': + raise dns.exception.SyntaxError("Start cannot be a negative number") + + for c in text: + if c == '-' and state == 0: + start = int(cur) + cur = '' + state = 1 + elif c == '/': + stop = int(cur) + cur = '' + state = 2 + elif c.isdigit(): + cur += c + else: + raise dns.exception.SyntaxError("Could not parse %s" % (c)) + + if state == 0: + raise dns.exception.SyntaxError("no stop value specified") + elif state == 1: + stop = int(cur) + else: + assert state == 2 + step = int(cur) + + assert step >= 1 + assert start >= 0 + if start > stop: + raise dns.exception.SyntaxError('start must be <= stop') + + return (start, stop, step) diff --git a/venv/lib/python3.10/site-packages/dns/immutable.py b/venv/lib/python3.10/site-packages/dns/immutable.py new file mode 100644 index 0000000..db7abbc --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/immutable.py @@ -0,0 +1,70 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import collections.abc +import sys + +# pylint: disable=unused-import +if sys.version_info >= (3, 7): + odict = dict + from dns._immutable_ctx import immutable +else: + # pragma: no cover + from collections import OrderedDict as odict + from dns._immutable_attr import immutable # noqa +# pylint: enable=unused-import + + +@immutable +class Dict(collections.abc.Mapping): + def __init__(self, dictionary, no_copy=False): + """Make an immutable dictionary from the specified dictionary. + + If *no_copy* is `True`, then *dictionary* will be wrapped instead + of copied. Only set this if you are sure there will be no external + references to the dictionary. + """ + if no_copy and isinstance(dictionary, odict): + self._odict = dictionary + else: + self._odict = odict(dictionary) + self._hash = None + + def __getitem__(self, key): + return self._odict.__getitem__(key) + + def __hash__(self): # pylint: disable=invalid-hash-returned + if self._hash is None: + h = 0 + for key in sorted(self._odict.keys()): + h ^= hash(key) + object.__setattr__(self, '_hash', h) + # this does return an int, but pylint doesn't figure that out + return self._hash + + def __len__(self): + return len(self._odict) + + def __iter__(self): + return iter(self._odict) + + +def constify(o): + """ + Convert mutable types to immutable types. + """ + if isinstance(o, bytearray): + return bytes(o) + if isinstance(o, tuple): + try: + hash(o) + return o + except Exception: + return tuple(constify(elt) for elt in o) + if isinstance(o, list): + return tuple(constify(elt) for elt in o) + if isinstance(o, dict): + cdict = odict() + for k, v in o.items(): + cdict[k] = constify(v) + return Dict(cdict, True) + return o diff --git a/venv/lib/python3.10/site-packages/dns/inet.py b/venv/lib/python3.10/site-packages/dns/inet.py new file mode 100644 index 0000000..d3bdc64 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/inet.py @@ -0,0 +1,170 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Generic Internet address helper functions.""" + +import socket + +import dns.ipv4 +import dns.ipv6 + + +# We assume that AF_INET and AF_INET6 are always defined. We keep +# these here for the benefit of any old code (unlikely though that +# is!). +AF_INET = socket.AF_INET +AF_INET6 = socket.AF_INET6 + + +def inet_pton(family, text): + """Convert the textual form of a network address into its binary form. + + *family* is an ``int``, the address family. + + *text* is a ``str``, the textual address. + + Raises ``NotImplementedError`` if the address family specified is not + implemented. + + Returns a ``bytes``. + """ + + if family == AF_INET: + return dns.ipv4.inet_aton(text) + elif family == AF_INET6: + return dns.ipv6.inet_aton(text, True) + else: + raise NotImplementedError + + +def inet_ntop(family, address): + """Convert the binary form of a network address into its textual form. + + *family* is an ``int``, the address family. + + *address* is a ``bytes``, the network address in binary form. + + Raises ``NotImplementedError`` if the address family specified is not + implemented. + + Returns a ``str``. + """ + + if family == AF_INET: + return dns.ipv4.inet_ntoa(address) + elif family == AF_INET6: + return dns.ipv6.inet_ntoa(address) + else: + raise NotImplementedError + + +def af_for_address(text): + """Determine the address family of a textual-form network address. + + *text*, a ``str``, the textual address. + + Raises ``ValueError`` if the address family cannot be determined + from the input. + + Returns an ``int``. + """ + + try: + dns.ipv4.inet_aton(text) + return AF_INET + except Exception: + try: + dns.ipv6.inet_aton(text, True) + return AF_INET6 + except Exception: + raise ValueError + + +def is_multicast(text): + """Is the textual-form network address a multicast address? + + *text*, a ``str``, the textual address. + + Raises ``ValueError`` if the address family cannot be determined + from the input. + + Returns a ``bool``. + """ + + try: + first = dns.ipv4.inet_aton(text)[0] + return first >= 224 and first <= 239 + except Exception: + try: + first = dns.ipv6.inet_aton(text, True)[0] + return first == 255 + except Exception: + raise ValueError + + +def is_address(text): + """Is the specified string an IPv4 or IPv6 address? + + *text*, a ``str``, the textual address. + + Returns a ``bool``. + """ + + try: + dns.ipv4.inet_aton(text) + return True + except Exception: + try: + dns.ipv6.inet_aton(text, True) + return True + except Exception: + return False + + +def low_level_address_tuple(high_tuple, af=None): + """Given a "high-level" address tuple, i.e. + an (address, port) return the appropriate "low-level" address tuple + suitable for use in socket calls. + + If an *af* other than ``None`` is provided, it is assumed the + address in the high-level tuple is valid and has that af. If af + is ``None``, then af_for_address will be called. 
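+
+    Example (illustrative):
+
+    >>> import dns.inet
+    >>> dns.inet.low_level_address_tuple(('10.0.0.1', 53))
+    ('10.0.0.1', 53)
+    >>> dns.inet.low_level_address_tuple(('2001:db8::1', 53))
+    ('2001:db8::1', 53, 0, 0)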
+ + """ + address, port = high_tuple + if af is None: + af = af_for_address(address) + if af == AF_INET: + return (address, port) + elif af == AF_INET6: + i = address.find('%') + if i < 0: + # no scope, shortcut! + return (address, port, 0, 0) + # try to avoid getaddrinfo() + addrpart = address[:i] + scope = address[i + 1:] + if scope.isdigit(): + return (addrpart, port, 0, int(scope)) + try: + return (addrpart, port, 0, socket.if_nametoindex(scope)) + except AttributeError: # pragma: no cover (we can't really test this) + ai_flags = socket.AI_NUMERICHOST + ((*_, tup), *_) = socket.getaddrinfo(address, port, flags=ai_flags) + return tup + else: + raise NotImplementedError(f'unknown address family {af}') diff --git a/venv/lib/python3.10/site-packages/dns/inet.pyi b/venv/lib/python3.10/site-packages/dns/inet.pyi new file mode 100644 index 0000000..6d9dcc7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/inet.pyi @@ -0,0 +1,4 @@ +from typing import Union +from socket import AddressFamily + +AF_INET6 : Union[int, AddressFamily] diff --git a/venv/lib/python3.10/site-packages/dns/ipv4.py b/venv/lib/python3.10/site-packages/dns/ipv4.py new file mode 100644 index 0000000..e1f38d3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/ipv4.py @@ -0,0 +1,60 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""IPv4 helper functions.""" + +import struct + +import dns.exception + +def inet_ntoa(address): + """Convert an IPv4 address in binary form to text form. + + *address*, a ``bytes``, the IPv4 address in binary form. + + Returns a ``str``. + """ + + if len(address) != 4: + raise dns.exception.SyntaxError + return ('%u.%u.%u.%u' % (address[0], address[1], + address[2], address[3])) + +def inet_aton(text): + """Convert an IPv4 address in text form to binary form. + + *text*, a ``str``, the IPv4 address in textual form. + + Returns a ``bytes``. + """ + + if not isinstance(text, bytes): + text = text.encode() + parts = text.split(b'.') + if len(parts) != 4: + raise dns.exception.SyntaxError + for part in parts: + if not part.isdigit(): + raise dns.exception.SyntaxError + if len(part) > 1 and part[0] == ord('0'): + # No leading zeros + raise dns.exception.SyntaxError + try: + b = [int(part) for part in parts] + return struct.pack('BBBB', *b) + except Exception: + raise dns.exception.SyntaxError diff --git a/venv/lib/python3.10/site-packages/dns/ipv6.py b/venv/lib/python3.10/site-packages/dns/ipv6.py new file mode 100644 index 0000000..0db6fcf --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/ipv6.py @@ -0,0 +1,197 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""IPv6 helper functions.""" + +import re +import binascii + +import dns.exception +import dns.ipv4 + +_leading_zero = re.compile(r'0+([0-9a-f]+)') + +def inet_ntoa(address): + """Convert an IPv6 address in binary form to text form. + + *address*, a ``bytes``, the IPv6 address in binary form. + + Raises ``ValueError`` if the address isn't 16 bytes long. + Returns a ``str``. + """ + + if len(address) != 16: + raise ValueError("IPv6 addresses are 16 bytes long") + hex = binascii.hexlify(address) + chunks = [] + i = 0 + l = len(hex) + while i < l: + chunk = hex[i:i + 4].decode() + # strip leading zeros. we do this with an re instead of + # with lstrip() because lstrip() didn't support chars until + # python 2.2.2 + m = _leading_zero.match(chunk) + if m is not None: + chunk = m.group(1) + chunks.append(chunk) + i += 4 + # + # Compress the longest subsequence of 0-value chunks to :: + # + best_start = 0 + best_len = 0 + start = -1 + last_was_zero = False + for i in range(8): + if chunks[i] != '0': + if last_was_zero: + end = i + current_len = end - start + if current_len > best_len: + best_start = start + best_len = current_len + last_was_zero = False + elif not last_was_zero: + start = i + last_was_zero = True + if last_was_zero: + end = 8 + current_len = end - start + if current_len > best_len: + best_start = start + best_len = current_len + if best_len > 1: + if best_start == 0 and \ + (best_len == 6 or + best_len == 5 and chunks[5] == 'ffff'): + # We have an embedded IPv4 address + if best_len == 6: + prefix = '::' + else: + prefix = '::ffff:' + hex = prefix + dns.ipv4.inet_ntoa(address[12:]) + else: + hex = ':'.join(chunks[:best_start]) + '::' + \ + ':'.join(chunks[best_start + best_len:]) + else: + hex = ':'.join(chunks) + return hex + +_v4_ending = re.compile(br'(.*):(\d+\.\d+\.\d+\.\d+)$') +_colon_colon_start = re.compile(br'::.*') +_colon_colon_end = re.compile(br'.*::$') + +def inet_aton(text, ignore_scope=False): + """Convert an IPv6 address in text form to binary form. + + *text*, a ``str``, the IPv6 address in textual form. + + *ignore_scope*, a ``bool``. If ``True``, a scope will be ignored. + If ``False``, the default, it is an error for a scope to be present. + + Returns a ``bytes``. + """ + + # + # Our aim here is not something fast; we just want something that works. 
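+    # For example (illustrative): inet_aton(b'::1') returns fifteen zero
+    # bytes followed by 0x01, and inet_ntoa() maps that value back to '::1'.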
+ # + if not isinstance(text, bytes): + text = text.encode() + + if ignore_scope: + parts = text.split(b'%') + l = len(parts) + if l == 2: + text = parts[0] + elif l > 2: + raise dns.exception.SyntaxError + + if text == b'': + raise dns.exception.SyntaxError + elif text.endswith(b':') and not text.endswith(b'::'): + raise dns.exception.SyntaxError + elif text.startswith(b':') and not text.startswith(b'::'): + raise dns.exception.SyntaxError + elif text == b'::': + text = b'0::' + # + # Get rid of the icky dot-quad syntax if we have it. + # + m = _v4_ending.match(text) + if m is not None: + b = dns.ipv4.inet_aton(m.group(2)) + text = ("{}:{:02x}{:02x}:{:02x}{:02x}".format(m.group(1).decode(), + b[0], b[1], b[2], + b[3])).encode() + # + # Try to turn '::' into ':'; if no match try to + # turn '::' into ':' + # + m = _colon_colon_start.match(text) + if m is not None: + text = text[1:] + else: + m = _colon_colon_end.match(text) + if m is not None: + text = text[:-1] + # + # Now canonicalize into 8 chunks of 4 hex digits each + # + chunks = text.split(b':') + l = len(chunks) + if l > 8: + raise dns.exception.SyntaxError + seen_empty = False + canonical = [] + for c in chunks: + if c == b'': + if seen_empty: + raise dns.exception.SyntaxError + seen_empty = True + for _ in range(0, 8 - l + 1): + canonical.append(b'0000') + else: + lc = len(c) + if lc > 4: + raise dns.exception.SyntaxError + if lc != 4: + c = (b'0' * (4 - lc)) + c + canonical.append(c) + if l < 8 and not seen_empty: + raise dns.exception.SyntaxError + text = b''.join(canonical) + + # + # Finally we can go to binary. + # + try: + return binascii.unhexlify(text) + except (binascii.Error, TypeError): + raise dns.exception.SyntaxError + +_mapped_prefix = b'\x00' * 10 + b'\xff\xff' + +def is_mapped(address): + """Is the specified address a mapped IPv4 address? + + *address*, a ``bytes`` is an IPv6 address in binary form. + + Returns a ``bool``. + """ + + return address.startswith(_mapped_prefix) diff --git a/venv/lib/python3.10/site-packages/dns/message.py b/venv/lib/python3.10/site-packages/dns/message.py new file mode 100644 index 0000000..c2751a9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/message.py @@ -0,0 +1,1558 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""DNS Messages""" + +import contextlib +import io +import time + +import dns.wire +import dns.edns +import dns.enum +import dns.exception +import dns.flags +import dns.name +import dns.opcode +import dns.entropy +import dns.rcode +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.rrset +import dns.renderer +import dns.ttl +import dns.tsig +import dns.rdtypes.ANY.OPT +import dns.rdtypes.ANY.TSIG + + +class ShortHeader(dns.exception.FormError): + """The DNS packet passed to from_wire() is too short.""" + + +class TrailingJunk(dns.exception.FormError): + """The DNS packet passed to from_wire() has extra junk at the end of it.""" + + +class UnknownHeaderField(dns.exception.DNSException): + """The header field name was not recognized when converting from text + into a message.""" + + +class BadEDNS(dns.exception.FormError): + """An OPT record occurred somewhere other than + the additional data section.""" + + +class BadTSIG(dns.exception.FormError): + """A TSIG record occurred somewhere other than the end of + the additional data section.""" + + +class UnknownTSIGKey(dns.exception.DNSException): + """A TSIG with an unknown key was received.""" + + +class Truncated(dns.exception.DNSException): + """The truncated flag is set.""" + + supp_kwargs = {'message'} + + def message(self): + """As much of the message as could be processed. + + Returns a ``dns.message.Message``. + """ + return self.kwargs['message'] + + +class NotQueryResponse(dns.exception.DNSException): + """Message is not a response to a query.""" + + +class ChainTooLong(dns.exception.DNSException): + """The CNAME chain is too long.""" + + +class AnswerForNXDOMAIN(dns.exception.DNSException): + """The rcode is NXDOMAIN but an answer was found.""" + +class NoPreviousName(dns.exception.SyntaxError): + """No previous name was known.""" + + +class MessageSection(dns.enum.IntEnum): + """Message sections""" + QUESTION = 0 + ANSWER = 1 + AUTHORITY = 2 + ADDITIONAL = 3 + + @classmethod + def _maximum(cls): + return 3 + + +class MessageError: + def __init__(self, exception, offset): + self.exception = exception + self.offset = offset + + +DEFAULT_EDNS_PAYLOAD = 1232 +MAX_CHAIN = 16 + +class Message: + """A DNS message.""" + + _section_enum = MessageSection + + def __init__(self, id=None): + if id is None: + self.id = dns.entropy.random_16() + else: + self.id = id + self.flags = 0 + self.sections = [[], [], [], []] + self.opt = None + self.request_payload = 0 + self.keyring = None + self.tsig = None + self.request_mac = b'' + self.xfr = False + self.origin = None + self.tsig_ctx = None + self.index = {} + self.errors = [] + + @property + def question(self): + """ The question section.""" + return self.sections[0] + + @question.setter + def question(self, v): + self.sections[0] = v + + @property + def answer(self): + """ The answer section.""" + return self.sections[1] + + @answer.setter + def answer(self, v): + self.sections[1] = v + + @property + def authority(self): + """ The authority section.""" + return self.sections[2] + + @authority.setter + def authority(self, v): + self.sections[2] = v + + @property + def additional(self): + """ The additional data section.""" + return self.sections[3] + + @additional.setter + def additional(self, v): + self.sections[3] = v + + def __repr__(self): + return '' + + def __str__(self): + return self.to_text() + + def to_text(self, origin=None, relativize=True, **kw): + """Convert the message to text. 
+ + The *origin*, *relativize*, and any other keyword + arguments are passed to the RRset ``to_wire()`` method. + + Returns a ``str``. + """ + + s = io.StringIO() + s.write('id %d\n' % self.id) + s.write('opcode %s\n' % dns.opcode.to_text(self.opcode())) + s.write('rcode %s\n' % dns.rcode.to_text(self.rcode())) + s.write('flags %s\n' % dns.flags.to_text(self.flags)) + if self.edns >= 0: + s.write('edns %s\n' % self.edns) + if self.ednsflags != 0: + s.write('eflags %s\n' % + dns.flags.edns_to_text(self.ednsflags)) + s.write('payload %d\n' % self.payload) + for opt in self.options: + s.write('option %s\n' % opt.to_text()) + for (name, which) in self._section_enum.__members__.items(): + s.write(f';{name}\n') + for rrset in self.section_from_number(which): + s.write(rrset.to_text(origin, relativize, **kw)) + s.write('\n') + # + # We strip off the final \n so the caller can print the result without + # doing weird things to get around eccentricities in Python print + # formatting + # + return s.getvalue()[:-1] + + def __eq__(self, other): + """Two messages are equal if they have the same content in the + header, question, answer, and authority sections. + + Returns a ``bool``. + """ + + if not isinstance(other, Message): + return False + if self.id != other.id: + return False + if self.flags != other.flags: + return False + for i, section in enumerate(self.sections): + other_section = other.sections[i] + for n in section: + if n not in other_section: + return False + for n in other_section: + if n not in section: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def is_response(self, other): + """Is *other*, also a ``dns.message.Message``, a response to this + message? + + Returns a ``bool``. + """ + + if other.flags & dns.flags.QR == 0 or \ + self.id != other.id or \ + dns.opcode.from_flags(self.flags) != \ + dns.opcode.from_flags(other.flags): + return False + if other.rcode() in {dns.rcode.FORMERR, dns.rcode.SERVFAIL, + dns.rcode.NOTIMP, dns.rcode.REFUSED}: + # We don't check the question section in these cases if + # the other question section is empty, even though they + # still really ought to have a question section. + if len(other.question) == 0: + return True + if dns.opcode.is_update(self.flags): + # This is assuming the "sender doesn't include anything + # from the update", but we don't care to check the other + # case, which is that all the sections are returned and + # identical. + return True + for n in self.question: + if n not in other.question: + return False + for n in other.question: + if n not in self.question: + return False + return True + + def section_number(self, section): + """Return the "section number" of the specified section for use + in indexing. + + *section* is one of the section attributes of this message. + + Raises ``ValueError`` if the section isn't known. + + Returns an ``int``. + """ + + for i, our_section in enumerate(self.sections): + if section is our_section: + return self._section_enum(i) + raise ValueError('unknown section') + + def section_from_number(self, number): + """Return the section list associated with the specified section + number. + + *number* is a section number `int` or the text form of a section + name. + + Raises ``ValueError`` if the section isn't known. + + Returns a ``list``. 
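+
+        Example (an illustrative sketch using ``dns.message.make_query``):
+
+        >>> import dns.message
+        >>> msg = dns.message.make_query('example.', 'A')
+        >>> msg.section_from_number(0) is msg.question
+        True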
+ """ + + section = self._section_enum.make(number) + return self.sections[section] + + def find_rrset(self, section, name, rdclass, rdtype, + covers=dns.rdatatype.NONE, deleting=None, create=False, + force_unique=False): + """Find the RRset with the given attributes in the specified section. + + *section*, an ``int`` section number, or one of the section + attributes of this message. This specifies the + the section of the message to search. For example:: + + my_message.find_rrset(my_message.answer, name, rdclass, rdtype) + my_message.find_rrset(dns.message.ANSWER, name, rdclass, rdtype) + + *name*, a ``dns.name.Name``, the name of the RRset. + + *rdclass*, an ``int``, the class of the RRset. + + *rdtype*, an ``int``, the type of the RRset. + + *covers*, an ``int`` or ``None``, the covers value of the RRset. + The default is ``None``. + + *deleting*, an ``int`` or ``None``, the deleting value of the RRset. + The default is ``None``. + + *create*, a ``bool``. If ``True``, create the RRset if it is not found. + The created RRset is appended to *section*. + + *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, + create a new RRset regardless of whether a matching RRset exists + already. The default is ``False``. This is useful when creating + DDNS Update messages, as order matters for them. + + Raises ``KeyError`` if the RRset was not found and create was + ``False``. + + Returns a ``dns.rrset.RRset object``. + """ + + if isinstance(section, int): + section_number = section + section = self.section_from_number(section_number) + else: + section_number = self.section_number(section) + key = (section_number, name, rdclass, rdtype, covers, deleting) + if not force_unique: + if self.index is not None: + rrset = self.index.get(key) + if rrset is not None: + return rrset + else: + for rrset in section: + if rrset.full_match(name, rdclass, rdtype, covers, + deleting): + return rrset + if not create: + raise KeyError + rrset = dns.rrset.RRset(name, rdclass, rdtype, covers, deleting) + section.append(rrset) + if self.index is not None: + self.index[key] = rrset + return rrset + + def get_rrset(self, section, name, rdclass, rdtype, + covers=dns.rdatatype.NONE, deleting=None, create=False, + force_unique=False): + """Get the RRset with the given attributes in the specified section. + + If the RRset is not found, None is returned. + + *section*, an ``int`` section number, or one of the section + attributes of this message. This specifies the + the section of the message to search. For example:: + + my_message.get_rrset(my_message.answer, name, rdclass, rdtype) + my_message.get_rrset(dns.message.ANSWER, name, rdclass, rdtype) + + *name*, a ``dns.name.Name``, the name of the RRset. + + *rdclass*, an ``int``, the class of the RRset. + + *rdtype*, an ``int``, the type of the RRset. + + *covers*, an ``int`` or ``None``, the covers value of the RRset. + The default is ``None``. + + *deleting*, an ``int`` or ``None``, the deleting value of the RRset. + The default is ``None``. + + *create*, a ``bool``. If ``True``, create the RRset if it is not found. + The created RRset is appended to *section*. + + *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, + create a new RRset regardless of whether a matching RRset exists + already. The default is ``False``. This is useful when creating + DDNS Update messages, as order matters for them. + + Returns a ``dns.rrset.RRset object`` or ``None``. 
+ """ + + try: + rrset = self.find_rrset(section, name, rdclass, rdtype, covers, + deleting, create, force_unique) + except KeyError: + rrset = None + return rrset + + def to_wire(self, origin=None, max_size=0, multi=False, tsig_ctx=None, + **kw): + """Return a string containing the message in DNS compressed wire + format. + + Additional keyword arguments are passed to the RRset ``to_wire()`` + method. + + *origin*, a ``dns.name.Name`` or ``None``, the origin to be appended + to any relative names. If ``None``, and the message has an origin + attribute that is not ``None``, then it will be used. + + *max_size*, an ``int``, the maximum size of the wire format + output; default is 0, which means "the message's request + payload, if nonzero, or 65535". + + *multi*, a ``bool``, should be set to ``True`` if this message is + part of a multiple message sequence. + + *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the + ongoing TSIG context, used when signing zone transfers. + + Raises ``dns.exception.TooBig`` if *max_size* was exceeded. + + Returns a ``bytes``. + """ + + if origin is None and self.origin is not None: + origin = self.origin + if max_size == 0: + if self.request_payload != 0: + max_size = self.request_payload + else: + max_size = 65535 + if max_size < 512: + max_size = 512 + elif max_size > 65535: + max_size = 65535 + r = dns.renderer.Renderer(self.id, self.flags, max_size, origin) + for rrset in self.question: + r.add_question(rrset.name, rrset.rdtype, rrset.rdclass) + for rrset in self.answer: + r.add_rrset(dns.renderer.ANSWER, rrset, **kw) + for rrset in self.authority: + r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw) + if self.opt is not None: + r.add_rrset(dns.renderer.ADDITIONAL, self.opt) + for rrset in self.additional: + r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw) + r.write_header() + if self.tsig is not None: + (new_tsig, ctx) = dns.tsig.sign(r.get_wire(), + self.keyring, + self.tsig[0], + int(time.time()), + self.request_mac, + tsig_ctx, + multi) + self.tsig.clear() + self.tsig.add(new_tsig) + r.add_rrset(dns.renderer.ADDITIONAL, self.tsig) + r.write_header() + if multi: + self.tsig_ctx = ctx + return r.get_wire() + + @staticmethod + def _make_tsig(keyname, algorithm, time_signed, fudge, mac, original_id, + error, other): + tsig = dns.rdtypes.ANY.TSIG.TSIG(dns.rdataclass.ANY, dns.rdatatype.TSIG, + algorithm, time_signed, fudge, mac, + original_id, error, other) + return dns.rrset.from_rdata(keyname, 0, tsig) + + def use_tsig(self, keyring, keyname=None, fudge=300, + original_id=None, tsig_error=0, other_data=b'', + algorithm=dns.tsig.default_algorithm): + """When sending, a TSIG signature using the specified key + should be added. + + *key*, a ``dns.tsig.Key`` is the key to use. If a key is specified, + the *keyring* and *algorithm* fields are not used. + + *keyring*, a ``dict``, ``callable`` or ``dns.tsig.Key``, is either + the TSIG keyring or key to use. + + The format of a keyring dict is a mapping from TSIG key name, as + ``dns.name.Name`` to ``dns.tsig.Key`` or a TSIG secret, a ``bytes``. + If a ``dict`` *keyring* is specified but a *keyname* is not, the key + used will be the first key in the *keyring*. Note that the order of + keys in a dictionary is not defined, so applications should supply a + keyname when a ``dict`` keyring is used, unless they know the keyring + contains only one key. If a ``callable`` keyring is specified, the + callable will be called with the message and the keyname, and is + expected to return a key. 
+ + *keyname*, a ``dns.name.Name``, ``str`` or ``None``, the name of + this TSIG key to use; defaults to ``None``. If *keyring* is a + ``dict``, the key must be defined in it. If *keyring* is a + ``dns.tsig.Key``, this is ignored. + + *fudge*, an ``int``, the TSIG time fudge. + + *original_id*, an ``int``, the TSIG original id. If ``None``, + the message's id is used. + + *tsig_error*, an ``int``, the TSIG error code. + + *other_data*, a ``bytes``, the TSIG other data. + + *algorithm*, a ``dns.name.Name``, the TSIG algorithm to use. This is + only used if *keyring* is a ``dict``, and the key entry is a ``bytes``. + """ + + if isinstance(keyring, dns.tsig.Key): + key = keyring + keyname = key.name + elif callable(keyring): + key = keyring(self, keyname) + else: + if isinstance(keyname, str): + keyname = dns.name.from_text(keyname) + if keyname is None: + keyname = next(iter(keyring)) + key = keyring[keyname] + if isinstance(key, bytes): + key = dns.tsig.Key(keyname, key, algorithm) + self.keyring = key + if original_id is None: + original_id = self.id + self.tsig = self._make_tsig(keyname, self.keyring.algorithm, 0, fudge, + b'', original_id, tsig_error, other_data) + + @property + def keyname(self): + if self.tsig: + return self.tsig.name + else: + return None + + @property + def keyalgorithm(self): + if self.tsig: + return self.tsig[0].algorithm + else: + return None + + @property + def mac(self): + if self.tsig: + return self.tsig[0].mac + else: + return None + + @property + def tsig_error(self): + if self.tsig: + return self.tsig[0].error + else: + return None + + @property + def had_tsig(self): + return bool(self.tsig) + + @staticmethod + def _make_opt(flags=0, payload=DEFAULT_EDNS_PAYLOAD, options=None): + opt = dns.rdtypes.ANY.OPT.OPT(payload, dns.rdatatype.OPT, + options or ()) + return dns.rrset.from_rdata(dns.name.root, int(flags), opt) + + def use_edns(self, edns=0, ednsflags=0, payload=DEFAULT_EDNS_PAYLOAD, + request_payload=None, options=None): + """Configure EDNS behavior. + + *edns*, an ``int``, is the EDNS level to use. Specifying + ``None``, ``False``, or ``-1`` means "do not use EDNS", and in this case + the other parameters are ignored. Specifying ``True`` is + equivalent to specifying 0, i.e. "use EDNS0". + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + + *request_payload*, an ``int``, is the EDNS payload size to use when + sending this message. If not specified, defaults to the value of + *payload*. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS + options. 
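+
+        For example, a minimal sketch enabling EDNS0 with a 4096-byte
+        payload (the payload value is illustrative)::
+
+            my_message.use_edns(0, payload=4096)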
+ """ + + if edns is None or edns is False: + edns = -1 + elif edns is True: + edns = 0 + if edns < 0: + self.opt = None + self.request_payload = 0 + else: + # make sure the EDNS version in ednsflags agrees with edns + ednsflags &= 0xFF00FFFF + ednsflags |= (edns << 16) + if options is None: + options = [] + self.opt = self._make_opt(ednsflags, payload, options) + if request_payload is None: + request_payload = payload + self.request_payload = request_payload + + @property + def edns(self): + if self.opt: + return (self.ednsflags & 0xff0000) >> 16 + else: + return -1 + + @property + def ednsflags(self): + if self.opt: + return self.opt.ttl + else: + return 0 + + @ednsflags.setter + def ednsflags(self, v): + if self.opt: + self.opt.ttl = v + elif v: + self.opt = self._make_opt(v) + + @property + def payload(self): + if self.opt: + return self.opt[0].payload + else: + return 0 + + @property + def options(self): + if self.opt: + return self.opt[0].options + else: + return () + + def want_dnssec(self, wanted=True): + """Enable or disable 'DNSSEC desired' flag in requests. + + *wanted*, a ``bool``. If ``True``, then DNSSEC data is + desired in the response, EDNS is enabled if required, and then + the DO bit is set. If ``False``, the DO bit is cleared if + EDNS is enabled. + """ + + if wanted: + self.ednsflags |= dns.flags.DO + elif self.opt: + self.ednsflags &= ~dns.flags.DO + + def rcode(self): + """Return the rcode. + + Returns an ``int``. + """ + return dns.rcode.from_flags(int(self.flags), int(self.ednsflags)) + + def set_rcode(self, rcode): + """Set the rcode. + + *rcode*, an ``int``, is the rcode to set. + """ + (value, evalue) = dns.rcode.to_flags(rcode) + self.flags &= 0xFFF0 + self.flags |= value + self.ednsflags &= 0x00FFFFFF + self.ednsflags |= evalue + + def opcode(self): + """Return the opcode. + + Returns an ``int``. + """ + return dns.opcode.from_flags(int(self.flags)) + + def set_opcode(self, opcode): + """Set the opcode. + + *opcode*, an ``int``, is the opcode to set. + """ + self.flags &= 0x87FF + self.flags |= dns.opcode.to_flags(opcode) + + def _get_one_rr_per_rrset(self, value): + # What the caller picked is fine. + return value + + # pylint: disable=unused-argument + + def _parse_rr_header(self, section, name, rdclass, rdtype): + return (rdclass, rdtype, None, False) + + # pylint: enable=unused-argument + + def _parse_special_rr_header(self, section, count, position, + name, rdclass, rdtype): + if rdtype == dns.rdatatype.OPT: + if section != MessageSection.ADDITIONAL or self.opt or \ + name != dns.name.root: + raise BadEDNS + elif rdtype == dns.rdatatype.TSIG: + if section != MessageSection.ADDITIONAL or \ + rdclass != dns.rdatatype.ANY or \ + position != count - 1: + raise BadTSIG + return (rdclass, rdtype, None, False) + + +class ChainingResult: + """The result of a call to dns.message.QueryMessage.resolve_chaining(). + + The ``answer`` attribute is the answer RRSet, or ``None`` if it doesn't + exist. + + The ``canonical_name`` attribute is the canonical name after all + chaining has been applied (this is the name as ``rrset.name`` in cases + where rrset is not ``None``). + + The ``minimum_ttl`` attribute is the minimum TTL, i.e. the TTL to + use if caching the data. It is the smallest of all the CNAME TTLs + and either the answer TTL if it exists or the SOA TTL and SOA + minimum values for negative answers. + + The ``cnames`` attribute is a list of all the CNAME RRSets followed to + get to the canonical name. 
+ """ + def __init__(self, canonical_name, answer, minimum_ttl, cnames): + self.canonical_name = canonical_name + self.answer = answer + self.minimum_ttl = minimum_ttl + self.cnames = cnames + + +class QueryMessage(Message): + def resolve_chaining(self): + """Follow the CNAME chain in the response to determine the answer + RRset. + + Raises ``dns.message.NotQueryResponse`` if the message is not + a response. + + Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. + + Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN + but an answer was found. + + Raises ``dns.exception.FormError`` if the question count is not 1. + + Returns a ChainingResult object. + """ + if self.flags & dns.flags.QR == 0: + raise NotQueryResponse + if len(self.question) != 1: + raise dns.exception.FormError + question = self.question[0] + qname = question.name + min_ttl = dns.ttl.MAX_TTL + answer = None + count = 0 + cnames = [] + while count < MAX_CHAIN: + try: + answer = self.find_rrset(self.answer, qname, question.rdclass, + question.rdtype) + min_ttl = min(min_ttl, answer.ttl) + break + except KeyError: + if question.rdtype != dns.rdatatype.CNAME: + try: + crrset = self.find_rrset(self.answer, qname, + question.rdclass, + dns.rdatatype.CNAME) + cnames.append(crrset) + min_ttl = min(min_ttl, crrset.ttl) + for rd in crrset: + qname = rd.target + break + count += 1 + continue + except KeyError: + # Exit the chaining loop + break + else: + # Exit the chaining loop + break + if count >= MAX_CHAIN: + raise ChainTooLong + if self.rcode() == dns.rcode.NXDOMAIN and answer is not None: + raise AnswerForNXDOMAIN + if answer is None: + # Further minimize the TTL with NCACHE. + auname = qname + while True: + # Look for an SOA RR whose owner name is a superdomain + # of qname. + try: + srrset = self.find_rrset(self.authority, auname, + question.rdclass, + dns.rdatatype.SOA) + min_ttl = min(min_ttl, srrset.ttl, srrset[0].minimum) + break + except KeyError: + try: + auname = auname.parent() + except dns.name.NoParent: + break + return ChainingResult(qname, answer, min_ttl, cnames) + + def canonical_name(self): + """Return the canonical name of the first name in the question + section. + + Raises ``dns.message.NotQueryResponse`` if the message is not + a response. + + Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. + + Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN + but an answer was found. + + Raises ``dns.exception.FormError`` if the question count is not 1. + """ + return self.resolve_chaining().canonical_name + + +def _maybe_import_update(): + # We avoid circular imports by doing this here. We do it in another + # function as doing it in _message_factory_from_opcode() makes "dns" + # a local symbol, and the first line fails :) + + # pylint: disable=redefined-outer-name,import-outside-toplevel,unused-import + import dns.update # noqa: F401 + + +def _message_factory_from_opcode(opcode): + if opcode == dns.opcode.QUERY: + return QueryMessage + elif opcode == dns.opcode.UPDATE: + _maybe_import_update() + return dns.update.UpdateMessage + else: + return Message + + +class _WireReader: + + """Wire format reader. + + parser: the binary parser + message: The message object being built + initialize_message: Callback to set message parsing options + question_only: Are we only reading the question? + one_rr_per_rrset: Put each RR into its own RRset? + keyring: TSIG keyring + ignore_trailing: Ignore trailing junk at end of request? 
+ multi: Is this message part of a multi-message sequence? + DNS dynamic updates. + continue_on_error: try to extract as much information as possible from + the message, accumulating MessageErrors in the *errors* attribute instead of + raising them. + """ + + def __init__(self, wire, initialize_message, question_only=False, + one_rr_per_rrset=False, ignore_trailing=False, + keyring=None, multi=False, continue_on_error=False): + self.parser = dns.wire.Parser(wire) + self.message = None + self.initialize_message = initialize_message + self.question_only = question_only + self.one_rr_per_rrset = one_rr_per_rrset + self.ignore_trailing = ignore_trailing + self.keyring = keyring + self.multi = multi + self.continue_on_error = continue_on_error + self.errors = [] + + def _get_question(self, section_number, qcount): + """Read the next *qcount* records from the wire data and add them to + the question section. + """ + + section = self.message.sections[section_number] + for _ in range(qcount): + qname = self.parser.get_name(self.message.origin) + (rdtype, rdclass) = self.parser.get_struct('!HH') + (rdclass, rdtype, _, _) = \ + self.message._parse_rr_header(section_number, qname, rdclass, + rdtype) + self.message.find_rrset(section, qname, rdclass, rdtype, + create=True, force_unique=True) + + def _add_error(self, e): + self.errors.append(MessageError(e, self.parser.current)) + + def _get_section(self, section_number, count): + """Read the next I{count} records from the wire data and add them to + the specified section. + + section_number: the section of the message to which to add records + count: the number of records to read + """ + + section = self.message.sections[section_number] + force_unique = self.one_rr_per_rrset + for i in range(count): + rr_start = self.parser.current + absolute_name = self.parser.get_name() + if self.message.origin is not None: + name = absolute_name.relativize(self.message.origin) + else: + name = absolute_name + (rdtype, rdclass, ttl, rdlen) = self.parser.get_struct('!HHIH') + if rdtype in (dns.rdatatype.OPT, dns.rdatatype.TSIG): + (rdclass, rdtype, deleting, empty) = \ + self.message._parse_special_rr_header(section_number, + count, i, name, + rdclass, rdtype) + else: + (rdclass, rdtype, deleting, empty) = \ + self.message._parse_rr_header(section_number, + name, rdclass, rdtype) + try: + rdata_start = self.parser.current + if empty: + if rdlen > 0: + raise dns.exception.FormError + rd = None + covers = dns.rdatatype.NONE + else: + with self.parser.restrict_to(rdlen): + rd = dns.rdata.from_wire_parser(rdclass, rdtype, + self.parser, + self.message.origin) + covers = rd.covers() + if self.message.xfr and rdtype == dns.rdatatype.SOA: + force_unique = True + if rdtype == dns.rdatatype.OPT: + self.message.opt = dns.rrset.from_rdata(name, ttl, rd) + elif rdtype == dns.rdatatype.TSIG: + if self.keyring is None: + raise UnknownTSIGKey('got signed message without ' + 'keyring') + if isinstance(self.keyring, dict): + key = self.keyring.get(absolute_name) + if isinstance(key, bytes): + key = dns.tsig.Key(absolute_name, key, rd.algorithm) + elif callable(self.keyring): + key = self.keyring(self.message, absolute_name) + else: + key = self.keyring + if key is None: + raise UnknownTSIGKey("key '%s' unknown" % name) + self.message.keyring = key + self.message.tsig_ctx = \ + dns.tsig.validate(self.parser.wire, + key, + absolute_name, + rd, + int(time.time()), + self.message.request_mac, + rr_start, + self.message.tsig_ctx, + self.multi) + self.message.tsig = 
dns.rrset.from_rdata(absolute_name, 0, + rd) + else: + rrset = self.message.find_rrset(section, name, + rdclass, rdtype, covers, + deleting, True, + force_unique) + if rd is not None: + if ttl > 0x7fffffff: + ttl = 0 + rrset.add(rd, ttl) + except Exception as e: + if self.continue_on_error: + self._add_error(e) + self.parser.seek(rdata_start + rdlen) + else: + raise + + def read(self): + """Read a wire format DNS message and build a dns.message.Message + object.""" + + if self.parser.remaining() < 12: + raise ShortHeader + (id, flags, qcount, ancount, aucount, adcount) = \ + self.parser.get_struct('!HHHHHH') + factory = _message_factory_from_opcode(dns.opcode.from_flags(flags)) + self.message = factory(id=id) + self.message.flags = dns.flags.Flag(flags) + self.initialize_message(self.message) + self.one_rr_per_rrset = \ + self.message._get_one_rr_per_rrset(self.one_rr_per_rrset) + try: + self._get_question(MessageSection.QUESTION, qcount) + if self.question_only: + return self.message + self._get_section(MessageSection.ANSWER, ancount) + self._get_section(MessageSection.AUTHORITY, aucount) + self._get_section(MessageSection.ADDITIONAL, adcount) + if not self.ignore_trailing and self.parser.remaining() != 0: + raise TrailingJunk + if self.multi and self.message.tsig_ctx and \ + not self.message.had_tsig: + self.message.tsig_ctx.update(self.parser.wire) + except Exception as e: + if self.continue_on_error: + self._add_error(e) + else: + raise + return self.message + + +def from_wire(wire, keyring=None, request_mac=b'', xfr=False, origin=None, + tsig_ctx=None, multi=False, + question_only=False, one_rr_per_rrset=False, + ignore_trailing=False, raise_on_truncation=False, + continue_on_error=False): + """Convert a DNS wire format message into a message object. + + *keyring*, a ``dns.tsig.Key`` or ``dict``, the key or keyring to use if the + message is signed. + + *request_mac*, a ``bytes``. If the message is a response to a TSIG-signed + request, *request_mac* should be set to the MAC of that request. + + *xfr*, a ``bool``, should be set to ``True`` if this message is part of a + zone transfer. + + *origin*, a ``dns.name.Name`` or ``None``. If the message is part of a zone + transfer, *origin* should be the origin name of the zone. If not ``None``, + names will be relativized to the origin. + + *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the + ongoing TSIG context, used when validating zone transfers. + + *multi*, a ``bool``, should be set to ``True`` if this message is part of a + multiple message sequence. + + *question_only*, a ``bool``. If ``True``, read only up to the end of the + question section. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of + the message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if the + TC bit is set. + + *continue_on_error*, a ``bool``. If ``True``, try to continue parsing even + if errors occur. Erroneous rdata will be ignored. Errors will be + accumulated as a list of MessageError objects in the message's ``errors`` + attribute. This option is recommended only for DNS analysis tools, or for + use in a server as part of an error handling path. The default is + ``False``. + + Raises ``dns.message.ShortHeader`` if the message is less than 12 octets + long. 
+ + Raises ``dns.message.TrailingJunk`` if there were octets in the message past + the end of the proper DNS message, and *ignore_trailing* is ``False``. + + Raises ``dns.message.BadEDNS`` if an OPT record was in the wrong section, or + occurred more than once. + + Raises ``dns.message.BadTSIG`` if a TSIG record was not the last record of + the additional data section. + + Raises ``dns.message.Truncated`` if the TC flag is set and + *raise_on_truncation* is ``True``. + + Returns a ``dns.message.Message``. + """ + + def initialize_message(message): + message.request_mac = request_mac + message.xfr = xfr + message.origin = origin + message.tsig_ctx = tsig_ctx + + reader = _WireReader(wire, initialize_message, question_only, + one_rr_per_rrset, ignore_trailing, keyring, multi, + continue_on_error) + try: + m = reader.read() + except dns.exception.FormError: + if reader.message and (reader.message.flags & dns.flags.TC) and \ + raise_on_truncation: + raise Truncated(message=reader.message) + else: + raise + # Reading a truncated message might not have any errors, so we + # have to do this check here too. + if m.flags & dns.flags.TC and raise_on_truncation: + raise Truncated(message=m) + if continue_on_error: + m.errors = reader.errors + + return m + + +class _TextReader: + + """Text format reader. + + tok: the tokenizer. + message: The message object being built. + DNS dynamic updates. + last_name: The most recently read name when building a message object. + one_rr_per_rrset: Put each RR into its own RRset? + origin: The origin for relative names + relativize: relativize names? + relativize_to: the origin to relativize to. + """ + + def __init__(self, text, idna_codec, one_rr_per_rrset=False, + origin=None, relativize=True, relativize_to=None): + self.message = None + self.tok = dns.tokenizer.Tokenizer(text, idna_codec=idna_codec) + self.last_name = None + self.one_rr_per_rrset = one_rr_per_rrset + self.origin = origin + self.relativize = relativize + self.relativize_to = relativize_to + self.id = None + self.edns = -1 + self.ednsflags = 0 + self.payload = DEFAULT_EDNS_PAYLOAD + self.rcode = None + self.opcode = dns.opcode.QUERY + self.flags = 0 + + def _header_line(self, _): + """Process one line from the text format header section.""" + + token = self.tok.get() + what = token.value + if what == 'id': + self.id = self.tok.get_int() + elif what == 'flags': + while True: + token = self.tok.get() + if not token.is_identifier(): + self.tok.unget(token) + break + self.flags = self.flags | dns.flags.from_text(token.value) + elif what == 'edns': + self.edns = self.tok.get_int() + self.ednsflags = self.ednsflags | (self.edns << 16) + elif what == 'eflags': + if self.edns < 0: + self.edns = 0 + while True: + token = self.tok.get() + if not token.is_identifier(): + self.tok.unget(token) + break + self.ednsflags = self.ednsflags | \ + dns.flags.edns_from_text(token.value) + elif what == 'payload': + self.payload = self.tok.get_int() + if self.edns < 0: + self.edns = 0 + elif what == 'opcode': + text = self.tok.get_string() + self.opcode = dns.opcode.from_text(text) + self.flags = self.flags | dns.opcode.to_flags(self.opcode) + elif what == 'rcode': + text = self.tok.get_string() + self.rcode = dns.rcode.from_text(text) + else: + raise UnknownHeaderField + self.tok.get_eol() + + def _question_line(self, section_number): + """Process one line from the text format question section.""" + + section = self.message.sections[section_number] + token = self.tok.get(want_leading=True) + if not 
token.is_whitespace(): + self.last_name = self.tok.as_name(token, self.message.origin, + self.relativize, + self.relativize_to) + name = self.last_name + if name is None: + raise NoPreviousName + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = dns.rdataclass.IN + # Type + rdtype = dns.rdatatype.from_text(token.value) + (rdclass, rdtype, _, _) = \ + self.message._parse_rr_header(section_number, name, rdclass, rdtype) + self.message.find_rrset(section, name, rdclass, rdtype, create=True, + force_unique=True) + self.tok.get_eol() + + def _rr_line(self, section_number): + """Process one line from the text format answer, authority, or + additional data sections. + """ + + section = self.message.sections[section_number] + # Name + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = self.tok.as_name(token, self.message.origin, + self.relativize, + self.relativize_to) + name = self.last_name + if name is None: + raise NoPreviousName + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + # TTL + try: + ttl = int(token.value, 0) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + ttl = 0 + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = dns.rdataclass.IN + # Type + rdtype = dns.rdatatype.from_text(token.value) + (rdclass, rdtype, deleting, empty) = \ + self.message._parse_rr_header(section_number, name, rdclass, rdtype) + token = self.tok.get() + if empty and not token.is_eol_or_eof(): + raise dns.exception.SyntaxError + if not empty and token.is_eol_or_eof(): + raise dns.exception.UnexpectedEnd + if not token.is_eol_or_eof(): + self.tok.unget(token) + rd = dns.rdata.from_text(rdclass, rdtype, self.tok, + self.message.origin, self.relativize, + self.relativize_to) + covers = rd.covers() + else: + rd = None + covers = dns.rdatatype.NONE + rrset = self.message.find_rrset(section, name, + rdclass, rdtype, covers, + deleting, True, self.one_rr_per_rrset) + if rd is not None: + rrset.add(rd, ttl) + + def _make_message(self): + factory = _message_factory_from_opcode(self.opcode) + message = factory(id=self.id) + message.flags = self.flags + if self.edns >= 0: + message.use_edns(self.edns, self.ednsflags, self.payload) + if self.rcode: + message.set_rcode(self.rcode) + if self.origin: + message.origin = self.origin + return message + + def read(self): + """Read a text format DNS message and build a dns.message.Message + object.""" + + line_method = self._header_line + section_number = None + while 1: + token = self.tok.get(True, True) + if token.is_eol_or_eof(): + break + if token.is_comment(): + u = token.value.upper() + if u == 'HEADER': + line_method = self._header_line + + if self.message: + message = self.message + else: + # If we don't have a message, create one with the current + # opcode, so that we know which section names to parse. 
+ message = self._make_message() + try: + section_number = message._section_enum.from_text(u) + # We found a section name. If we don't have a message, + # use the one we just created. + if not self.message: + self.message = message + self.one_rr_per_rrset = \ + message._get_one_rr_per_rrset(self.one_rr_per_rrset) + if section_number == MessageSection.QUESTION: + line_method = self._question_line + else: + line_method = self._rr_line + except Exception: + # It's just a comment. + pass + self.tok.get_eol() + continue + self.tok.unget(token) + line_method(section_number) + if not self.message: + self.message = self._make_message() + return self.message + + +def from_text(text, idna_codec=None, one_rr_per_rrset=False, + origin=None, relativize=True, relativize_to=None): + """Convert the text format message into a message object. + + The reader stops after reading the first blank line in the input to + facilitate reading multiple messages from a single file with + ``dns.message.from_file()``. + + *text*, a ``str``, the text format message. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put + into its own rrset. The default is ``False``. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Raises ``dns.message.UnknownHeaderField`` if a header is unknown. + + Raises ``dns.exception.SyntaxError`` if the text is badly formed. + + Returns a ``dns.message.Message object`` + """ + + # 'text' can also be a file, but we don't publish that fact + # since it's an implementation detail. The official file + # interface is from_file(). + + reader = _TextReader(text, idna_codec, one_rr_per_rrset, origin, + relativize, relativize_to) + return reader.read() + + +def from_file(f, idna_codec=None, one_rr_per_rrset=False): + """Read the next text format message from the specified file. + + Message blocks are separated by a single blank line. + + *f*, a ``file`` or ``str``. If *f* is text, it is treated as the + pathname of a file to open. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put + into its own rrset. The default is ``False``. + + Raises ``dns.message.UnknownHeaderField`` if a header is unknown. + + Raises ``dns.exception.SyntaxError`` if the text is badly formed. + + Returns a ``dns.message.Message object`` + """ + + with contextlib.ExitStack() as stack: + if isinstance(f, str): + f = stack.enter_context(open(f)) + return from_text(f, idna_codec, one_rr_per_rrset) + + +def make_query(qname, rdtype, rdclass=dns.rdataclass.IN, use_edns=None, + want_dnssec=False, ednsflags=None, payload=None, + request_payload=None, options=None, idna_codec=None, + id=None, flags=dns.flags.RD): + """Make a query message. + + The query name, type, and class may all be specified either + as objects of the appropriate type, or as strings. + + The query will have a randomly chosen query id, and its DNS flags + will be set to dns.flags.RD. + + qname, a ``dns.name.Name`` or ``str``, the query name. + + *rdtype*, an ``int`` or ``str``, the desired rdata type. 
+ + *rdclass*, an ``int`` or ``str``, the desired rdata class; the default + is class IN. + + *use_edns*, an ``int``, ``bool`` or ``None``. The EDNS level to use; the + default is ``None``. If ``None``, EDNS will be enabled only if other + parameters (*ednsflags*, *payload*, *request_payload*, or *options*) are + set. + See the description of dns.message.Message.use_edns() for the possible + values for use_edns and their meanings. + + *want_dnssec*, a ``bool``. If ``True``, DNSSEC data is desired. + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + + *request_payload*, an ``int``, is the EDNS payload size to use when + sending this message. If not specified, defaults to the value of + *payload*. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS + options. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *id*, an ``int`` or ``None``, the desired query id. The default is + ``None``, which generates a random query id. + + *flags*, an ``int``, the desired query flags. The default is + ``dns.flags.RD``. + + Returns a ``dns.message.QueryMessage`` + """ + + if isinstance(qname, str): + qname = dns.name.from_text(qname, idna_codec=idna_codec) + rdtype = dns.rdatatype.RdataType.make(rdtype) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + m = QueryMessage(id=id) + m.flags = dns.flags.Flag(flags) + m.find_rrset(m.question, qname, rdclass, rdtype, create=True, + force_unique=True) + # only pass keywords on to use_edns if they have been set to a + # non-None value. Setting a field will turn EDNS on if it hasn't + # been configured. + kwargs = {} + if ednsflags is not None: + kwargs['ednsflags'] = ednsflags + if payload is not None: + kwargs['payload'] = payload + if request_payload is not None: + kwargs['request_payload'] = request_payload + if options is not None: + kwargs['options'] = options + if kwargs and use_edns is None: + use_edns = 0 + kwargs['edns'] = use_edns + m.use_edns(**kwargs) + m.want_dnssec(want_dnssec) + return m + + +def make_response(query, recursion_available=False, our_payload=8192, + fudge=300, tsig_error=0): + """Make a message which is a response for the specified query. + The message returned is really a response skeleton; it has all + of the infrastructure required of a response, but none of the + content. + + The response's question section is a shallow copy of the query's + question section, so the query's question RRsets should not be + changed. + + *query*, a ``dns.message.Message``, the query to respond to. + + *recursion_available*, a ``bool``, should RA be set in the response? + + *our_payload*, an ``int``, the payload size to advertise in EDNS + responses. + + *fudge*, an ``int``, the TSIG time fudge. + + *tsig_error*, an ``int``, the TSIG error. + + Returns a ``dns.message.Message`` object whose specific class is + appropriate for the query. For example, if query is a + ``dns.update.UpdateMessage``, response will be too. 
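+
+    For example, a minimal sketch (the query name is illustrative)::
+
+        q = dns.message.make_query('example.', dns.rdatatype.SOA)
+        r = dns.message.make_response(q)
+        r.set_rcode(dns.rcode.NOERROR)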
+ """ + + if query.flags & dns.flags.QR: + raise dns.exception.FormError('specified query message is not a query') + factory = _message_factory_from_opcode(query.opcode()) + response = factory(id=query.id) + response.flags = dns.flags.QR | (query.flags & dns.flags.RD) + if recursion_available: + response.flags |= dns.flags.RA + response.set_opcode(query.opcode()) + response.question = list(query.question) + if query.edns >= 0: + response.use_edns(0, 0, our_payload, query.payload) + if query.had_tsig: + response.use_tsig(query.keyring, query.keyname, fudge, None, + tsig_error, b'', query.keyalgorithm) + response.request_mac = query.mac + return response + +### BEGIN generated MessageSection constants + +QUESTION = MessageSection.QUESTION +ANSWER = MessageSection.ANSWER +AUTHORITY = MessageSection.AUTHORITY +ADDITIONAL = MessageSection.ADDITIONAL + +### END generated MessageSection constants diff --git a/venv/lib/python3.10/site-packages/dns/message.pyi b/venv/lib/python3.10/site-packages/dns/message.pyi new file mode 100644 index 0000000..252a411 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/message.pyi @@ -0,0 +1,47 @@ +from typing import Optional, Dict, List, Tuple, Union +from . import name, rrset, tsig, rdatatype, entropy, edns, rdataclass, rcode +import hmac + +class Message: + def to_wire(self, origin : Optional[name.Name]=None, max_size=0, **kw) -> bytes: + ... + def find_rrset(self, section : List[rrset.RRset], name : name.Name, rdclass : int, rdtype : int, + covers=rdatatype.NONE, deleting : Optional[int]=None, create=False, + force_unique=False) -> rrset.RRset: + ... + def __init__(self, id : Optional[int] =None) -> None: + self.id : int + self.flags = 0 + self.sections : List[List[rrset.RRset]] = [[], [], [], []] + self.opt : rrset.RRset = None + self.request_payload = 0 + self.keyring = None + self.tsig : rrset.RRset = None + self.request_mac = b'' + self.xfr = False + self.origin = None + self.tsig_ctx = None + self.index : Dict[Tuple[rrset.RRset, name.Name, int, int, Union[int,str], int], rrset.RRset] = {} + + def is_response(self, other : Message) -> bool: + ... + + def set_rcode(self, rcode : rcode.Rcode): + ... + +def from_text(a : str, idna_codec : Optional[name.IDNACodec] = None) -> Message: + ... + +def from_wire(wire, keyring : Optional[Dict[name.Name,bytes]] = None, request_mac = b'', xfr=False, origin=None, + tsig_ctx : Optional[Union[dns.tsig.HMACTSig, dns.tsig.GSSTSig]] = None, multi=False, + question_only=False, one_rr_per_rrset=False, + ignore_trailing=False) -> Message: + ... +def make_response(query : Message, recursion_available=False, our_payload=8192, + fudge=300) -> Message: + ... + +def make_query(qname : Union[name.Name,str], rdtype : Union[str,int], rdclass : Union[int,str] =rdataclass.IN, use_edns : Optional[bool] = None, + want_dnssec=False, ednsflags : Optional[int] = None, payload : Optional[int] = None, + request_payload : Optional[int] = None, options : Optional[List[edns.Option]] = None) -> Message: + ... diff --git a/venv/lib/python3.10/site-packages/dns/name.py b/venv/lib/python3.10/site-packages/dns/name.py new file mode 100644 index 0000000..8905d70 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/name.py @@ -0,0 +1,1018 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Names. +""" + +import copy +import struct + +import encodings.idna # type: ignore +try: + import idna # type: ignore + have_idna_2008 = True +except ImportError: # pragma: no cover + have_idna_2008 = False + +import dns.wire +import dns.exception +import dns.immutable + +# fullcompare() result values + +#: The compared names have no relationship to each other. +NAMERELN_NONE = 0 +#: the first name is a superdomain of the second. +NAMERELN_SUPERDOMAIN = 1 +#: The first name is a subdomain of the second. +NAMERELN_SUBDOMAIN = 2 +#: The compared names are equal. +NAMERELN_EQUAL = 3 +#: The compared names have a common ancestor. +NAMERELN_COMMONANCESTOR = 4 + + +class EmptyLabel(dns.exception.SyntaxError): + """A DNS label is empty.""" + + +class BadEscape(dns.exception.SyntaxError): + """An escaped code in a text format of DNS name is invalid.""" + + +class BadPointer(dns.exception.FormError): + """A DNS compression pointer points forward instead of backward.""" + + +class BadLabelType(dns.exception.FormError): + """The label type in DNS name wire format is unknown.""" + + +class NeedAbsoluteNameOrOrigin(dns.exception.DNSException): + """An attempt was made to convert a non-absolute name to + wire when there was also a non-absolute (or missing) origin.""" + + +class NameTooLong(dns.exception.FormError): + """A DNS name is > 255 octets long.""" + + +class LabelTooLong(dns.exception.SyntaxError): + """A DNS label is > 63 octets long.""" + + +class AbsoluteConcatenation(dns.exception.DNSException): + """An attempt was made to append anything other than the + empty name to an absolute DNS name.""" + + +class NoParent(dns.exception.DNSException): + """An attempt was made to get the parent of the root name + or the empty name.""" + +class NoIDNA2008(dns.exception.DNSException): + """IDNA 2008 processing was requested but the idna module is not + available.""" + + +class IDNAException(dns.exception.DNSException): + """IDNA processing raised an exception.""" + + supp_kwargs = {'idna_exception'} + fmt = "IDNA processing exception: {idna_exception}" + + +class IDNACodec: + """Abstract base class for IDNA encoder/decoders.""" + + def __init__(self): + pass + + def is_idna(self, label): + return label.lower().startswith(b'xn--') + + def encode(self, label): + raise NotImplementedError # pragma: no cover + + def decode(self, label): + # We do not apply any IDNA policy on decode. + if self.is_idna(label): + try: + label = label[4:].decode('punycode') + except Exception as e: + raise IDNAException(idna_exception=e) + return _escapify(label) + + +class IDNA2003Codec(IDNACodec): + """IDNA 2003 encoder/decoder.""" + + def __init__(self, strict_decode=False): + """Initialize the IDNA 2003 encoder/decoder. + + *strict_decode* is a ``bool``. 
If `True`, then IDNA2003 checking + is done when decoding. This can cause failures if the name + was encoded with IDNA2008. The default is `False`. + """ + + super().__init__() + self.strict_decode = strict_decode + + def encode(self, label): + """Encode *label*.""" + + if label == '': + return b'' + try: + return encodings.idna.ToASCII(label) + except UnicodeError: + raise LabelTooLong + + def decode(self, label): + """Decode *label*.""" + if not self.strict_decode: + return super().decode(label) + if label == b'': + return '' + try: + return _escapify(encodings.idna.ToUnicode(label)) + except Exception as e: + raise IDNAException(idna_exception=e) + + +class IDNA2008Codec(IDNACodec): + """IDNA 2008 encoder/decoder. + """ + + def __init__(self, uts_46=False, transitional=False, + allow_pure_ascii=False, strict_decode=False): + """Initialize the IDNA 2008 encoder/decoder. + + *uts_46* is a ``bool``. If True, apply Unicode IDNA + compatibility processing as described in Unicode Technical + Standard #46 (http://unicode.org/reports/tr46/). + If False, do not apply the mapping. The default is False. + + *transitional* is a ``bool``: If True, use the + "transitional" mode described in Unicode Technical Standard + #46. The default is False. + + *allow_pure_ascii* is a ``bool``. If True, then a label which + consists of only ASCII characters is allowed. This is less + strict than regular IDNA 2008, but is also necessary for mixed + names, e.g. a name with starting with "_sip._tcp." and ending + in an IDN suffix which would otherwise be disallowed. The + default is False. + + *strict_decode* is a ``bool``: If True, then IDNA2008 checking + is done when decoding. This can cause failures if the name + was encoded with IDNA2003. The default is False. + """ + super().__init__() + self.uts_46 = uts_46 + self.transitional = transitional + self.allow_pure_ascii = allow_pure_ascii + self.strict_decode = strict_decode + + def encode(self, label): + if label == '': + return b'' + if self.allow_pure_ascii and is_all_ascii(label): + encoded = label.encode('ascii') + if len(encoded) > 63: + raise LabelTooLong + return encoded + if not have_idna_2008: + raise NoIDNA2008 + try: + if self.uts_46: + label = idna.uts46_remap(label, False, self.transitional) + return idna.alabel(label) + except idna.IDNAError as e: + if e.args[0] == 'Label too long': + raise LabelTooLong + else: + raise IDNAException(idna_exception=e) + + def decode(self, label): + if not self.strict_decode: + return super().decode(label) + if label == b'': + return '' + if not have_idna_2008: + raise NoIDNA2008 + try: + ulabel = idna.ulabel(label) + if self.uts_46: + ulabel = idna.uts46_remap(ulabel, False, self.transitional) + return _escapify(ulabel) + except (idna.IDNAError, UnicodeError) as e: + raise IDNAException(idna_exception=e) + +_escaped = b'"().;\\@$' +_escaped_text = '"().;\\@$' + +IDNA_2003_Practical = IDNA2003Codec(False) +IDNA_2003_Strict = IDNA2003Codec(True) +IDNA_2003 = IDNA_2003_Practical +IDNA_2008_Practical = IDNA2008Codec(True, False, True, False) +IDNA_2008_UTS_46 = IDNA2008Codec(True, False, False, False) +IDNA_2008_Strict = IDNA2008Codec(False, False, False, True) +IDNA_2008_Transitional = IDNA2008Codec(True, True, False, False) +IDNA_2008 = IDNA_2008_Practical + +def _escapify(label): + """Escape the characters in label which need it. + @returns: the escaped string + @rtype: string""" + if isinstance(label, bytes): + # Ordinary DNS label mode. Escape special characters and values + # < 0x20 or > 0x7f. 
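+        # For example (illustrative): b'a"b\x07' escapes to 'a\\"b\\007'.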
+        text = ''
+        for c in label:
+            if c in _escaped:
+                text += '\\' + chr(c)
+            elif c > 0x20 and c < 0x7F:
+                text += chr(c)
+            else:
+                text += '\\%03d' % c
+        return text
+
+    # Unicode label mode.  Escape only special characters and values < 0x20
+    text = ''
+    for c in label:
+        if c in _escaped_text:
+            text += '\\' + c
+        elif c <= '\x20':
+            text += '\\%03d' % ord(c)
+        else:
+            text += c
+    return text
+
+def _validate_labels(labels):
+    """Check for empty labels in the middle of a label sequence,
+    labels that are too long, and for too many labels.
+
+    Raises ``dns.name.NameTooLong`` if the name as a whole is too long.
+
+    Raises ``dns.name.EmptyLabel`` if a label is empty (i.e. the root
+    label) and appears in a position other than the end of the label
+    sequence.
+    """
+
+    l = len(labels)
+    total = 0
+    i = -1
+    j = 0
+    for label in labels:
+        ll = len(label)
+        total += ll + 1
+        if ll > 63:
+            raise LabelTooLong
+        if i < 0 and label == b'':
+            i = j
+        j += 1
+    if total > 255:
+        raise NameTooLong
+    if i >= 0 and i != l - 1:
+        raise EmptyLabel
+
+
+def _maybe_convert_to_binary(label):
+    """If label is ``str``, convert it to ``bytes``.  If it is already
+    ``bytes`` just return it.
+    """
+
+    if isinstance(label, bytes):
+        return label
+    if isinstance(label, str):
+        return label.encode()
+    raise ValueError  # pragma: no cover
+
+
+@dns.immutable.immutable
+class Name:
+
+    """A DNS name.
+
+    The dns.name.Name class represents a DNS name as a tuple of
+    labels.  Each label is a ``bytes`` in DNS wire format.  Instances
+    of the class are immutable.
+    """
+
+    __slots__ = ['labels']
+
+    def __init__(self, labels):
+        """*labels* is any iterable whose values are ``str`` or ``bytes``.
+        """
+
+        labels = [_maybe_convert_to_binary(x) for x in labels]
+        self.labels = tuple(labels)
+        _validate_labels(self.labels)
+
+    def __copy__(self):
+        return Name(self.labels)
+
+    def __deepcopy__(self, memo):
+        return Name(copy.deepcopy(self.labels, memo))
+
+    def __getstate__(self):
+        # Names can be pickled
+        return {'labels': self.labels}
+
+    def __setstate__(self, state):
+        super().__setattr__('labels', state['labels'])
+        _validate_labels(self.labels)
+
+    def is_absolute(self):
+        """Is the most significant label of this name the root label?
+
+        Returns a ``bool``.
+        """
+
+        return len(self.labels) > 0 and self.labels[-1] == b''
+
+    def is_wild(self):
+        """Is this name wild?  (I.e. Is the least significant label '*'?)
+
+        Returns a ``bool``.
+        """
+
+        return len(self.labels) > 0 and self.labels[0] == b'*'
+
+    def __hash__(self):
+        """Return a case-insensitive hash of the name.
+
+        Returns an ``int``.
+        """
+
+        h = 0
+        for label in self.labels:
+            for c in label.lower():
+                h += (h << 3) + c
+        return h
+
+    def fullcompare(self, other):
+        """Compare two names, returning a 3-tuple
+        ``(relation, order, nlabels)``.
+
+        *relation* describes the relationship between the names,
+        and is one of: ``dns.name.NAMERELN_NONE``,
+        ``dns.name.NAMERELN_SUPERDOMAIN``, ``dns.name.NAMERELN_SUBDOMAIN``,
+        ``dns.name.NAMERELN_EQUAL``, or ``dns.name.NAMERELN_COMMONANCESTOR``.
+
+        *order* is < 0 if *self* < *other*, > 0 if *self* > *other*, and ==
+        0 if *self* == *other*.  A relative name is always less than an
+        absolute name.  If both names have the same relativity, then
+        the DNSSEC order relation is used to order them.
+
+        *nlabels* is the number of significant labels that the two names
+        have in common.
+
+        Here are some examples.  Names ending in "." are absolute names,
+        those not ending in "." are relative names.
+
+        ============= ============= =========== ===== =======
+        self          other         relation    order nlabels
+        ============= ============= =========== ===== =======
+        www.example.  www.example.  equal       0     3
+        www.example.  example.      subdomain   > 0   2
+        example.      www.example.  superdomain < 0   2
+        example1.com. example2.com. common anc. < 0   2
+        example1      example2.     none        < 0   0
+        example1.     example2      none        > 0   0
+        ============= ============= =========== ===== =======
+        """
+
+        sabs = self.is_absolute()
+        oabs = other.is_absolute()
+        if sabs != oabs:
+            if sabs:
+                return (NAMERELN_NONE, 1, 0)
+            else:
+                return (NAMERELN_NONE, -1, 0)
+        l1 = len(self.labels)
+        l2 = len(other.labels)
+        ldiff = l1 - l2
+        if ldiff < 0:
+            l = l1
+        else:
+            l = l2
+
+        order = 0
+        nlabels = 0
+        namereln = NAMERELN_NONE
+        while l > 0:
+            l -= 1
+            l1 -= 1
+            l2 -= 1
+            label1 = self.labels[l1].lower()
+            label2 = other.labels[l2].lower()
+            if label1 < label2:
+                order = -1
+                if nlabels > 0:
+                    namereln = NAMERELN_COMMONANCESTOR
+                return (namereln, order, nlabels)
+            elif label1 > label2:
+                order = 1
+                if nlabels > 0:
+                    namereln = NAMERELN_COMMONANCESTOR
+                return (namereln, order, nlabels)
+            nlabels += 1
+        order = ldiff
+        if ldiff < 0:
+            namereln = NAMERELN_SUPERDOMAIN
+        elif ldiff > 0:
+            namereln = NAMERELN_SUBDOMAIN
+        else:
+            namereln = NAMERELN_EQUAL
+        return (namereln, order, nlabels)
+
+    def is_subdomain(self, other):
+        """Is self a subdomain of other?
+
+        Note that the notion of subdomain includes equality, e.g.
+        "dnspython.org" is a subdomain of itself.
+
+        Returns a ``bool``.
+        """
+
+        (nr, _, _) = self.fullcompare(other)
+        if nr == NAMERELN_SUBDOMAIN or nr == NAMERELN_EQUAL:
+            return True
+        return False
+
+    def is_superdomain(self, other):
+        """Is self a superdomain of other?
+
+        Note that the notion of superdomain includes equality, e.g.
+        "dnspython.org" is a superdomain of itself.
+
+        Returns a ``bool``.
+        """
+
+        (nr, _, _) = self.fullcompare(other)
+        if nr == NAMERELN_SUPERDOMAIN or nr == NAMERELN_EQUAL:
+            return True
+        return False
+
+    def canonicalize(self):
+        """Return a name which is equal to the current name, but is in
+        DNSSEC canonical form.
+        """
+
+        return Name([x.lower() for x in self.labels])
+
+    def __eq__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] == 0
+        else:
+            return False
+
+    def __ne__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] != 0
+        else:
+            return True
+
+    def __lt__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] < 0
+        else:
+            return NotImplemented
+
+    def __le__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] <= 0
+        else:
+            return NotImplemented
+
+    def __ge__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] >= 0
+        else:
+            return NotImplemented
+
+    def __gt__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] > 0
+        else:
+            return NotImplemented
+
+    def __repr__(self):
+        return '<DNS name ' + self.__str__() + '>'
+
+    def __str__(self):
+        return self.to_text(False)
+
+    def to_text(self, omit_final_dot=False):
+        """Convert name to DNS text format.
+
+        *omit_final_dot* is a ``bool``.  If True, don't emit the final
+        dot (denoting the root label) for absolute names.  The default
+        is False.
+
+        Returns a ``str``.
+        """
+
+        if len(self.labels) == 0:
+            return '@'
+        if len(self.labels) == 1 and self.labels[0] == b'':
+            return '.'
+ if omit_final_dot and self.is_absolute(): + l = self.labels[:-1] + else: + l = self.labels + s = '.'.join(map(_escapify, l)) + return s + + def to_unicode(self, omit_final_dot=False, idna_codec=None): + """Convert name to Unicode text format. + + IDN ACE labels are converted to Unicode. + + *omit_final_dot* is a ``bool``. If True, don't emit the final + dot (denoting the root label) for absolute names. The default + is False. + *idna_codec* specifies the IDNA encoder/decoder. If None, the + dns.name.IDNA_2003_Practical encoder/decoder is used. + The IDNA_2003_Practical decoder does + not impose any policy, it just decodes punycode, so if you + don't want checking for compliance, you can use this decoder + for IDNA2008 as well. + + Returns a ``str``. + """ + + if len(self.labels) == 0: + return '@' + if len(self.labels) == 1 and self.labels[0] == b'': + return '.' + if omit_final_dot and self.is_absolute(): + l = self.labels[:-1] + else: + l = self.labels + if idna_codec is None: + idna_codec = IDNA_2003_Practical + return '.'.join([idna_codec.decode(x) for x in l]) + + def to_digestable(self, origin=None): + """Convert name to a format suitable for digesting in hashes. + + The name is canonicalized and converted to uncompressed wire + format. All names in wire format are absolute. If the name + is a relative name, then an origin must be supplied. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then origin will be appended + to the name. + + Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is + relative and no origin was provided. + + Returns a ``bytes``. + """ + + return self.to_wire(origin=origin, canonicalize=True) + + def to_wire(self, file=None, compress=None, origin=None, + canonicalize=False): + """Convert name to wire format, possibly compressing it. + + *file* is the file where the name is emitted (typically an + io.BytesIO file). If ``None`` (the default), a ``bytes`` + containing the wire name will be returned. + + *compress*, a ``dict``, is the compression table to use. If + ``None`` (the default), names will not be compressed. Note that + the compression code assumes that compression offset 0 is the + start of *file*, and thus compression will not be correct + if this is not the case. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then *origin* will be appended + to it. + + *canonicalize*, a ``bool``, indicates whether the name should + be canonicalized; that is, converted to a format suitable for + digesting in hashes. + + Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is + relative and no origin was provided. + + Returns a ``bytes`` or ``None``. 
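+
+        For example, a minimal sketch (the name is illustrative)::
+
+            name = dns.name.from_text('www.dnspython.org.')
+            wire = name.to_wire()    # uncompressed wire format, as bytes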
+ """ + + if file is None: + out = bytearray() + for label in self.labels: + out.append(len(label)) + if canonicalize: + out += label.lower() + else: + out += label + if not self.is_absolute(): + if origin is None or not origin.is_absolute(): + raise NeedAbsoluteNameOrOrigin + for label in origin.labels: + out.append(len(label)) + if canonicalize: + out += label.lower() + else: + out += label + return bytes(out) + + if not self.is_absolute(): + if origin is None or not origin.is_absolute(): + raise NeedAbsoluteNameOrOrigin + labels = list(self.labels) + labels.extend(list(origin.labels)) + else: + labels = self.labels + i = 0 + for label in labels: + n = Name(labels[i:]) + i += 1 + if compress is not None: + pos = compress.get(n) + else: + pos = None + if pos is not None: + value = 0xc000 + pos + s = struct.pack('!H', value) + file.write(s) + break + else: + if compress is not None and len(n) > 1: + pos = file.tell() + if pos <= 0x3fff: + compress[n] = pos + l = len(label) + file.write(struct.pack('!B', l)) + if l > 0: + if canonicalize: + file.write(label.lower()) + else: + file.write(label) + + def __len__(self): + """The length of the name (in labels). + + Returns an ``int``. + """ + + return len(self.labels) + + def __getitem__(self, index): + return self.labels[index] + + def __add__(self, other): + return self.concatenate(other) + + def __sub__(self, other): + return self.relativize(other) + + def split(self, depth): + """Split a name into a prefix and suffix names at the specified depth. + + *depth* is an ``int`` specifying the number of labels in the suffix + + Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the + name. + + Returns the tuple ``(prefix, suffix)``. + """ + + l = len(self.labels) + if depth == 0: + return (self, dns.name.empty) + elif depth == l: + return (dns.name.empty, self) + elif depth < 0 or depth > l: + raise ValueError( + 'depth must be >= 0 and <= the length of the name') + return (Name(self[: -depth]), Name(self[-depth:])) + + def concatenate(self, other): + """Return a new name which is the concatenation of self and other. + + Raises ``dns.name.AbsoluteConcatenation`` if the name is + absolute and *other* is not the empty name. + + Returns a ``dns.name.Name``. + """ + + if self.is_absolute() and len(other) > 0: + raise AbsoluteConcatenation + labels = list(self.labels) + labels.extend(list(other.labels)) + return Name(labels) + + def relativize(self, origin): + """If the name is a subdomain of *origin*, return a new name which is + the name relative to origin. Otherwise return the name. + + For example, relativizing ``www.dnspython.org.`` to origin + ``dnspython.org.`` returns the name ``www``. Relativizing ``example.`` + to origin ``dnspython.org.`` returns ``example.``. + + Returns a ``dns.name.Name``. + """ + + if origin is not None and self.is_subdomain(origin): + return Name(self[: -len(origin)]) + else: + return self + + def derelativize(self, origin): + """If the name is a relative name, return a new name which is the + concatenation of the name and origin. Otherwise return the name. + + For example, derelativizing ``www`` to origin ``dnspython.org.`` + returns the name ``www.dnspython.org.``. Derelativizing ``example.`` + to origin ``dnspython.org.`` returns ``example.``. + + Returns a ``dns.name.Name``. + """ + + if not self.is_absolute(): + return self.concatenate(origin) + else: + return self + + def choose_relativity(self, origin=None, relativize=True): + """Return a name with the relativity desired by the caller. 
+ + If *origin* is ``None``, then the name is returned. + Otherwise, if *relativize* is ``True`` the name is + relativized, and if *relativize* is ``False`` the name is + derelativized. + + Returns a ``dns.name.Name``. + """ + + if origin: + if relativize: + return self.relativize(origin) + else: + return self.derelativize(origin) + else: + return self + + def parent(self): + """Return the parent of the name. + + For example, the parent of ``www.dnspython.org.`` is ``dnspython.org``. + + Raises ``dns.name.NoParent`` if the name is either the root name or the + empty name, and thus has no parent. + + Returns a ``dns.name.Name``. + """ + + if self == root or self == empty: + raise NoParent + return Name(self.labels[1:]) + +#: The root name, '.' +root = Name([b'']) + +#: The empty name. +empty = Name([]) + +def from_unicode(text, origin=root, idna_codec=None): + """Convert unicode text into a Name object. + + Labels are encoded in IDN ACE form according to rules specified by + the IDNA codec. + + *text*, a ``str``, is the text to convert into a name. + + *origin*, a ``dns.name.Name``, specifies the origin to + append to non-absolute names. The default is the root name. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.name.Name``. + """ + + if not isinstance(text, str): + raise ValueError("input to from_unicode() must be a unicode string") + if not (origin is None or isinstance(origin, Name)): + raise ValueError("origin must be a Name or None") + labels = [] + label = '' + escaping = False + edigits = 0 + total = 0 + if idna_codec is None: + idna_codec = IDNA_2003 + if text == '@': + text = '' + if text: + if text in ['.', '\u3002', '\uff0e', '\uff61']: + return Name([b'']) # no Unicode "u" on this constant! + for c in text: + if escaping: + if edigits == 0: + if c.isdigit(): + total = int(c) + edigits += 1 + else: + label += c + escaping = False + else: + if not c.isdigit(): + raise BadEscape + total *= 10 + total += int(c) + edigits += 1 + if edigits == 3: + escaping = False + label += chr(total) + elif c in ['.', '\u3002', '\uff0e', '\uff61']: + if len(label) == 0: + raise EmptyLabel + labels.append(idna_codec.encode(label)) + label = '' + elif c == '\\': + escaping = True + edigits = 0 + total = 0 + else: + label += c + if escaping: + raise BadEscape + if len(label) > 0: + labels.append(idna_codec.encode(label)) + else: + labels.append(b'') + + if (len(labels) == 0 or labels[-1] != b'') and origin is not None: + labels.extend(list(origin.labels)) + return Name(labels) + +def is_all_ascii(text): + for c in text: + if ord(c) > 0x7f: + return False + return True + +def from_text(text, origin=root, idna_codec=None): + """Convert text into a Name object. + + *text*, a ``str``, is the text to convert into a name. + + *origin*, a ``dns.name.Name``, specifies the origin to + append to non-absolute names. The default is the root name. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.name.Name``. + """ + + if isinstance(text, str): + if not is_all_ascii(text): + # Some codepoint in the input text is > 127, so IDNA applies. + return from_unicode(text, origin, idna_codec) + # The input is all ASCII, so treat this like an ordinary non-IDNA + # domain name. Note that "all ASCII" is about the input text, + # not the codepoints in the domain name. E.g. 
if text has value + # + # r'\150\151\152\153\154\155\156\157\158\159' + # + # then it's still "all ASCII" even though the domain name has + # codepoints > 127. + text = text.encode('ascii') + if not isinstance(text, bytes): + raise ValueError("input to from_text() must be a string") + if not (origin is None or isinstance(origin, Name)): + raise ValueError("origin must be a Name or None") + labels = [] + label = b'' + escaping = False + edigits = 0 + total = 0 + if text == b'@': + text = b'' + if text: + if text == b'.': + return Name([b'']) + for c in text: + byte_ = struct.pack('!B', c) + if escaping: + if edigits == 0: + if byte_.isdigit(): + total = int(byte_) + edigits += 1 + else: + label += byte_ + escaping = False + else: + if not byte_.isdigit(): + raise BadEscape + total *= 10 + total += int(byte_) + edigits += 1 + if edigits == 3: + escaping = False + label += struct.pack('!B', total) + elif byte_ == b'.': + if len(label) == 0: + raise EmptyLabel + labels.append(label) + label = b'' + elif byte_ == b'\\': + escaping = True + edigits = 0 + total = 0 + else: + label += byte_ + if escaping: + raise BadEscape + if len(label) > 0: + labels.append(label) + else: + labels.append(b'') + if (len(labels) == 0 or labels[-1] != b'') and origin is not None: + labels.extend(list(origin.labels)) + return Name(labels) + + +def from_wire_parser(parser): + """Convert possibly compressed wire format into a Name. + + *parser* is a dns.wire.Parser. + + Raises ``dns.name.BadPointer`` if a compression pointer did not + point backwards in the message. + + Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. + + Returns a ``dns.name.Name`` + """ + + labels = [] + biggest_pointer = parser.current + with parser.restore_furthest(): + count = parser.get_uint8() + while count != 0: + if count < 64: + labels.append(parser.get_bytes(count)) + elif count >= 192: + current = (count & 0x3f) * 256 + parser.get_uint8() + if current >= biggest_pointer: + raise BadPointer + biggest_pointer = current + parser.seek(current) + else: + raise BadLabelType + count = parser.get_uint8() + labels.append(b'') + return Name(labels) + + +def from_wire(message, current): + """Convert possibly compressed wire format into a Name. + + *message* is a ``bytes`` containing an entire DNS message in DNS + wire form. + + *current*, an ``int``, is the offset of the beginning of the name + from the start of the message + + Raises ``dns.name.BadPointer`` if a compression pointer did not + point backwards in the message. + + Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. + + Returns a ``(dns.name.Name, int)`` tuple consisting of the name + that was read and the number of bytes of the wire format message + which were consumed reading it. + """ + + if not isinstance(message, bytes): + raise ValueError("input to from_wire() must be a byte string") + parser = dns.wire.Parser(message, current) + name = from_wire_parser(parser) + return (name, parser.current - current) diff --git a/venv/lib/python3.10/site-packages/dns/name.pyi b/venv/lib/python3.10/site-packages/dns/name.pyi new file mode 100644 index 0000000..c48d4bd --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/name.pyi @@ -0,0 +1,40 @@ +from typing import Optional, Union, Tuple, Iterable, List + +have_idna_2008: bool + +class Name: + def is_subdomain(self, o : Name) -> bool: ... + def is_superdomain(self, o : Name) -> bool: ... 
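A minimal sketch of ``from_text()`` and ``from_wire()`` round-tripping a name through uncompressed wire format; the name is illustrative only:

import dns.name

n = dns.name.from_text('www.example.com')   # origin defaults to the root
assert n.is_absolute()

wire = n.to_wire()                           # uncompressed wire format
(parsed, consumed) = dns.name.from_wire(wire, 0)
assert parsed == n and consumed == len(wire)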
+ def __init__(self, labels : Iterable[Union[bytes,str]]) -> None: + self.labels : List[bytes] + def is_absolute(self) -> bool: ... + def is_wild(self) -> bool: ... + def fullcompare(self, other) -> Tuple[int,int,int]: ... + def canonicalize(self) -> Name: ... + def __eq__(self, other) -> bool: ... + def __ne__(self, other) -> bool: ... + def __lt__(self, other : Name) -> bool: ... + def __le__(self, other : Name) -> bool: ... + def __ge__(self, other : Name) -> bool: ... + def __gt__(self, other : Name) -> bool: ... + def to_text(self, omit_final_dot=False) -> str: ... + def to_unicode(self, omit_final_dot=False, idna_codec=None) -> str: ... + def to_digestable(self, origin=None) -> bytes: ... + def to_wire(self, file=None, compress=None, origin=None, + canonicalize=False) -> Optional[bytes]: ... + def __add__(self, other : Name) -> Name: ... + def __sub__(self, other : Name) -> Name: ... + def split(self, depth) -> List[Tuple[str,str]]: ... + def concatenate(self, other : Name) -> Name: ... + def relativize(self, origin) -> Name: ... + def derelativize(self, origin) -> Name: ... + def choose_relativity(self, origin : Optional[Name] = None, relativize=True) -> Name: ... + def parent(self) -> Name: ... + +class IDNACodec: + pass + +def from_text(text, origin : Optional[Name] = Name('.'), idna_codec : Optional[IDNACodec] = None) -> Name: + ... + +empty : Name diff --git a/venv/lib/python3.10/site-packages/dns/namedict.py b/venv/lib/python3.10/site-packages/dns/namedict.py new file mode 100644 index 0000000..ec0750c --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/namedict.py @@ -0,0 +1,108 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# Copyright (C) 2016 Coresec Systems AB +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND CORESEC SYSTEMS AB DISCLAIMS ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL CORESEC +# SYSTEMS AB BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS name dictionary""" + +from collections.abc import MutableMapping + +import dns.name + + +class NameDict(MutableMapping): + """A dictionary whose keys are dns.name.Name objects. + + In addition to being like a regular Python dictionary, this + dictionary can also get the deepest match for a given key. 
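A minimal sketch of the deepest-match lookup this dictionary provides; the zone names and values are illustrative only:

import dns.name
import dns.namedict

d = dns.namedict.NameDict()
d[dns.name.from_text('org.')] = 'org'
d[dns.name.from_text('dnspython.org.')] = 'dnspython'

# The longest stored superdomain of the query name wins.
(key, value) = d.get_deepest_match(dns.name.from_text('www.dnspython.org.'))
assert key == dns.name.from_text('dnspython.org.')
assert value == 'dnspython'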
+ """ + + __slots__ = ["max_depth", "max_depth_items", "__store"] + + def __init__(self, *args, **kwargs): + super().__init__() + self.__store = dict() + #: the maximum depth of the keys that have ever been added + self.max_depth = 0 + #: the number of items of maximum depth + self.max_depth_items = 0 + self.update(dict(*args, **kwargs)) + + def __update_max_depth(self, key): + if len(key) == self.max_depth: + self.max_depth_items = self.max_depth_items + 1 + elif len(key) > self.max_depth: + self.max_depth = len(key) + self.max_depth_items = 1 + + def __getitem__(self, key): + return self.__store[key] + + def __setitem__(self, key, value): + if not isinstance(key, dns.name.Name): + raise ValueError('NameDict key must be a name') + self.__store[key] = value + self.__update_max_depth(key) + + def __delitem__(self, key): + self.__store.pop(key) + if len(key) == self.max_depth: + self.max_depth_items = self.max_depth_items - 1 + if self.max_depth_items == 0: + self.max_depth = 0 + for k in self.__store: + self.__update_max_depth(k) + + def __iter__(self): + return iter(self.__store) + + def __len__(self): + return len(self.__store) + + def has_key(self, key): + return key in self.__store + + def get_deepest_match(self, name): + """Find the deepest match to *name* in the dictionary. + + The deepest match is the longest name in the dictionary which is + a superdomain of *name*. Note that *superdomain* includes matching + *name* itself. + + *name*, a ``dns.name.Name``, the name to find. + + Returns a ``(key, value)`` where *key* is the deepest + ``dns.name.Name``, and *value* is the value associated with *key*. + """ + + depth = len(name) + if depth > self.max_depth: + depth = self.max_depth + for i in range(-depth, 0): + n = dns.name.Name(name[i:]) + if n in self: + return (n, self[n]) + v = self[dns.name.empty] + return (dns.name.empty, v) diff --git a/venv/lib/python3.10/site-packages/dns/node.py b/venv/lib/python3.10/site-packages/dns/node.py new file mode 100644 index 0000000..63ce008 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/node.py @@ -0,0 +1,320 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS nodes. A node is a set of rdatasets.""" + +import enum +import io + +import dns.immutable +import dns.rdataset +import dns.rdatatype +import dns.renderer + + +_cname_types = { + dns.rdatatype.CNAME, +} + +# "neutral" types can coexist with a CNAME and thus are not "other data" +_neutral_types = { + dns.rdatatype.NSEC, # RFC 4035 section 2.5 + dns.rdatatype.NSEC3, # This is not likely to happen, but not impossible! 
+    dns.rdatatype.KEY,  # RFC 4035 section 2.5, RFC 3007
+}
+
+def _matches_type_or_its_signature(rdtypes, rdtype, covers):
+    return rdtype in rdtypes or \
+        (rdtype == dns.rdatatype.RRSIG and covers in rdtypes)
+
+
+@enum.unique
+class NodeKind(enum.Enum):
+    """Rdatasets in nodes
+    """
+    REGULAR = 0  # a.k.a "other data"
+    NEUTRAL = 1
+    CNAME = 2
+
+    @classmethod
+    def classify(cls, rdtype, covers):
+        if _matches_type_or_its_signature(_cname_types, rdtype, covers):
+            return NodeKind.CNAME
+        elif _matches_type_or_its_signature(_neutral_types, rdtype, covers):
+            return NodeKind.NEUTRAL
+        else:
+            return NodeKind.REGULAR
+
+    @classmethod
+    def classify_rdataset(cls, rdataset):
+        return cls.classify(rdataset.rdtype, rdataset.covers)
+
+
+class Node:
+
+    """A Node is a set of rdatasets.
+
+    A node is either a CNAME node or an "other data" node. A CNAME
+    node contains only CNAME, KEY, NSEC, and NSEC3 rdatasets along with their
+    covering RRSIG rdatasets. An "other data" node contains any
+    rdataset other than a CNAME or RRSIG(CNAME) rdataset. When
+    changes are made to a node, the CNAME or "other data" state is
+    always consistent with the update, i.e. the most recent change
+    wins. For example, if you have a node which contains a CNAME
+    rdataset, and then add an MX rdataset to it, then the CNAME
+    rdataset will be deleted. Likewise if you have a node containing
+    an MX rdataset and add a CNAME rdataset, the MX rdataset will be
+    deleted.
+    """
+
+    __slots__ = ['rdatasets']
+
+    def __init__(self):
+        # the set of rdatasets, represented as a list.
+        self.rdatasets = []
+
+    def to_text(self, name, **kw):
+        """Convert a node to text format.
+
+        Each rdataset at the node is printed. Any keyword arguments
+        to this method are passed on to the rdataset's to_text() method.
+
+        *name*, a ``dns.name.Name`` or ``str``, the owner name of the
+        rdatasets.
+
+        Returns a ``str``.
+
+        """
+
+        s = io.StringIO()
+        for rds in self.rdatasets:
+            if len(rds) > 0:
+                s.write(rds.to_text(name, **kw))
+                s.write('\n')
+        return s.getvalue()[:-1]
+
+    def __repr__(self):
+        return '<DNS node ' + str(id(self)) + '>'
+
+    def __eq__(self, other):
+        #
+        # This is inefficient. Good thing we don't need to do it much.
+        #
+        for rd in self.rdatasets:
+            if rd not in other.rdatasets:
+                return False
+        for rd in other.rdatasets:
+            if rd not in self.rdatasets:
+                return False
+        return True
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __len__(self):
+        return len(self.rdatasets)
+
+    def __iter__(self):
+        return iter(self.rdatasets)
+
+    def _append_rdataset(self, rdataset):
+        """Append rdataset to the node with special handling for CNAME and
+        other data conditions.
+
+        Specifically, if the rdataset being appended has ``NodeKind.CNAME``,
+        then all rdatasets other than KEY, NSEC, NSEC3, and their covering
+        RRSIGs are deleted. If the rdataset being appended has
+        ``NodeKind.REGULAR`` then CNAME and RRSIG(CNAME) are deleted.
+        """
+        # Make having just one rdataset at the node fast.
+        if len(self.rdatasets) > 0:
+            kind = NodeKind.classify_rdataset(rdataset)
+            if kind == NodeKind.CNAME:
+                self.rdatasets = [rds for rds in self.rdatasets if
+                                  NodeKind.classify_rdataset(rds) !=
+                                  NodeKind.REGULAR]
+            elif kind == NodeKind.REGULAR:
+                self.rdatasets = [rds for rds in self.rdatasets if
+                                  NodeKind.classify_rdataset(rds) !=
+                                  NodeKind.CNAME]
+            # Otherwise the rdataset is NodeKind.NEUTRAL and we do not need to
+            # edit self.rdatasets.
+ self.rdatasets.append(rdataset) + + def find_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE, + create=False): + """Find an rdataset matching the specified properties in the + current node. + + *rdclass*, an ``int``, the class of the rdataset. + + *rdtype*, an ``int``, the type of the rdataset. + + *covers*, an ``int`` or ``None``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If True, create the rdataset if it is not found. + + Raises ``KeyError`` if an rdataset of the desired type and class does + not exist and *create* is not ``True``. + + Returns a ``dns.rdataset.Rdataset``. + """ + + for rds in self.rdatasets: + if rds.match(rdclass, rdtype, covers): + return rds + if not create: + raise KeyError + rds = dns.rdataset.Rdataset(rdclass, rdtype, covers) + self._append_rdataset(rds) + return rds + + def get_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE, + create=False): + """Get an rdataset matching the specified properties in the + current node. + + None is returned if an rdataset of the specified type and + class does not exist and *create* is not ``True``. + + *rdclass*, an ``int``, the class of the rdataset. + + *rdtype*, an ``int``, the type of the rdataset. + + *covers*, an ``int``, the covered type. Usually this value is + dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or + dns.rdatatype.RRSIG, then the covers value will be the rdata + type the SIG/RRSIG covers. The library treats the SIG and RRSIG + types as if they were a family of + types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much + easier to work with than if RRSIGs covering different rdata + types were aggregated into a single RRSIG rdataset. + + *create*, a ``bool``. If True, create the rdataset if it is not found. + + Returns a ``dns.rdataset.Rdataset`` or ``None``. + """ + + try: + rds = self.find_rdataset(rdclass, rdtype, covers, create) + except KeyError: + rds = None + return rds + + def delete_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE): + """Delete the rdataset matching the specified properties in the + current node. + + If a matching rdataset does not exist, it is not an error. + + *rdclass*, an ``int``, the class of the rdataset. + + *rdtype*, an ``int``, the type of the rdataset. + + *covers*, an ``int``, the covered type. + """ + + rds = self.get_rdataset(rdclass, rdtype, covers) + if rds is not None: + self.rdatasets.remove(rds) + + def replace_rdataset(self, replacement): + """Replace an rdataset. + + It is not an error if there is no rdataset matching *replacement*. + + Ownership of the *replacement* object is transferred to the node; + in other words, this method does not store a copy of *replacement* + at the node, it stores *replacement* itself. + + *replacement*, a ``dns.rdataset.Rdataset``. + + Raises ``ValueError`` if *replacement* is not a + ``dns.rdataset.Rdataset``. 
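A minimal sketch of the CNAME-versus-"other data" behavior enforced by ``_append_rdataset()`` above; the rdata types are illustrative only:

import dns.node
import dns.rdataclass
import dns.rdatatype

node = dns.node.Node()
node.find_rdataset(dns.rdataclass.IN, dns.rdatatype.MX, create=True)
assert len(node) == 1

# Appending a CNAME rdataset evicts the regular (MX) rdataset.
node.find_rdataset(dns.rdataclass.IN, dns.rdatatype.CNAME, create=True)
assert len(node) == 1
assert node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.MX) is None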
+ """ + + if not isinstance(replacement, dns.rdataset.Rdataset): + raise ValueError('replacement is not an rdataset') + if isinstance(replacement, dns.rrset.RRset): + # RRsets are not good replacements as the match() method + # is not compatible. + replacement = replacement.to_rdataset() + self.delete_rdataset(replacement.rdclass, replacement.rdtype, + replacement.covers) + self._append_rdataset(replacement) + + def classify(self): + """Classify a node. + + A node which contains a CNAME or RRSIG(CNAME) is a + ``NodeKind.CNAME`` node. + + A node which contains only "neutral" types, i.e. types allowed to + co-exist with a CNAME, is a ``NodeKind.NEUTRAL`` node. The neutral + types are NSEC, NSEC3, KEY, and their associated RRSIGS. An empty node + is also considered neutral. + + A node which contains some rdataset which is not a CNAME, RRSIG(CNAME), + or a neutral type is a a ``NodeKind.REGULAR`` node. Regular nodes are + also commonly referred to as "other data". + """ + for rdataset in self.rdatasets: + kind = NodeKind.classify(rdataset.rdtype, rdataset.covers) + if kind != NodeKind.NEUTRAL: + return kind + return NodeKind.NEUTRAL + + def is_immutable(self): + return False + + +@dns.immutable.immutable +class ImmutableNode(Node): + def __init__(self, node): + super().__init__() + self.rdatasets = tuple( + [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets] + ) + + def find_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE, + create=False): + if create: + raise TypeError("immutable") + return super().find_rdataset(rdclass, rdtype, covers, False) + + def get_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE, + create=False): + if create: + raise TypeError("immutable") + return super().get_rdataset(rdclass, rdtype, covers, False) + + def delete_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE): + raise TypeError("immutable") + + def replace_rdataset(self, replacement): + raise TypeError("immutable") + + def is_immutable(self): + return True diff --git a/venv/lib/python3.10/site-packages/dns/node.pyi b/venv/lib/python3.10/site-packages/dns/node.pyi new file mode 100644 index 0000000..0997edf --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/node.pyi @@ -0,0 +1,17 @@ +from typing import List, Optional, Union +from . import rdataset, rdatatype, name +class Node: + def __init__(self): + self.rdatasets : List[rdataset.Rdataset] + def to_text(self, name : Union[str,name.Name], **kw) -> str: + ... + def find_rdataset(self, rdclass : int, rdtype : int, covers=rdatatype.NONE, + create=False) -> rdataset.Rdataset: + ... + def get_rdataset(self, rdclass : int, rdtype : int, covers=rdatatype.NONE, + create=False) -> Optional[rdataset.Rdataset]: + ... + def delete_rdataset(self, rdclass : int, rdtype : int, covers=rdatatype.NONE): + ... + def replace_rdataset(self, replacement : rdataset.Rdataset) -> None: + ... diff --git a/venv/lib/python3.10/site-packages/dns/opcode.py b/venv/lib/python3.10/site-packages/dns/opcode.py new file mode 100644 index 0000000..5cf6143 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/opcode.py @@ -0,0 +1,115 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Opcodes.""" + +import dns.enum +import dns.exception + +class Opcode(dns.enum.IntEnum): + #: Query + QUERY = 0 + #: Inverse Query (historical) + IQUERY = 1 + #: Server Status (unspecified and unimplemented anywhere) + STATUS = 2 + #: Notify + NOTIFY = 4 + #: Dynamic Update + UPDATE = 5 + + @classmethod + def _maximum(cls): + return 15 + + @classmethod + def _unknown_exception_class(cls): + return UnknownOpcode + + +class UnknownOpcode(dns.exception.DNSException): + """An DNS opcode is unknown.""" + + +def from_text(text): + """Convert text into an opcode. + + *text*, a ``str``, the textual opcode + + Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. + + Returns an ``int``. + """ + + return Opcode.from_text(text) + + +def from_flags(flags): + """Extract an opcode from DNS message flags. + + *flags*, an ``int``, the DNS flags. + + Returns an ``int``. + """ + + return (flags & 0x7800) >> 11 + + +def to_flags(value): + """Convert an opcode to a value suitable for ORing into DNS message + flags. + + *value*, an ``int``, the DNS opcode value. + + Returns an ``int``. + """ + + return (value << 11) & 0x7800 + + +def to_text(value): + """Convert an opcode to text. + + *value*, an ``int`` the opcode value, + + Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. + + Returns a ``str``. + """ + + return Opcode.to_text(value) + + +def is_update(flags): + """Is the opcode in flags UPDATE? + + *flags*, an ``int``, the DNS message flags. + + Returns a ``bool``. + """ + + return from_flags(flags) == Opcode.UPDATE + +### BEGIN generated Opcode constants + +QUERY = Opcode.QUERY +IQUERY = Opcode.IQUERY +STATUS = Opcode.STATUS +NOTIFY = Opcode.NOTIFY +UPDATE = Opcode.UPDATE + +### END generated Opcode constants diff --git a/venv/lib/python3.10/site-packages/dns/py.typed b/venv/lib/python3.10/site-packages/dns/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/dns/query.py b/venv/lib/python3.10/site-packages/dns/query.py new file mode 100644 index 0000000..6d924b5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/query.py @@ -0,0 +1,1154 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""Talk to a DNS server.""" + +import contextlib +import enum +import errno +import os +import selectors +import socket +import struct +import time +import base64 +import urllib.parse + +import dns.exception +import dns.inet +import dns.name +import dns.message +import dns.rcode +import dns.rdataclass +import dns.rdatatype +import dns.serial +import dns.xfr + +try: + import requests + from requests_toolbelt.adapters.source import SourceAddressAdapter + from requests_toolbelt.adapters.host_header_ssl import HostHeaderSSLAdapter + _have_requests = True +except ImportError: # pragma: no cover + _have_requests = False + +_have_httpx = False +_have_http2 = False +try: + import httpx + _have_httpx = True + try: + # See if http2 support is available. + with httpx.Client(http2=True): + _have_http2 = True + except Exception: + pass +except ImportError: # pragma: no cover + pass + +have_doh = _have_requests or _have_httpx + +try: + import ssl +except ImportError: # pragma: no cover + class ssl: # type: ignore + + class WantReadException(Exception): + pass + + class WantWriteException(Exception): + pass + + class SSLSocket: + pass + + def create_default_context(self, *args, **kwargs): + raise Exception('no ssl support') + +# Function used to create a socket. Can be overridden if needed in special +# situations. +socket_factory = socket.socket + +class UnexpectedSource(dns.exception.DNSException): + """A DNS query response came from an unexpected address or port.""" + + +class BadResponse(dns.exception.FormError): + """A DNS query response does not respond to the question asked.""" + + +class NoDOH(dns.exception.DNSException): + """DNS over HTTPS (DOH) was requested but the requests module is not + available.""" + + +# for backwards compatibility +TransferError = dns.xfr.TransferError + + +def _compute_times(timeout): + now = time.time() + if timeout is None: + return (now, None) + else: + return (now, now + timeout) + + +def _wait_for(fd, readable, writable, _, expiration): + # Use the selected selector class to wait for any of the specified + # events. An "expiration" absolute time is converted into a relative + # timeout. + # + # The unused parameter is 'error', which is always set when + # selecting for read or write, and we have no error-only selects. + + if readable and isinstance(fd, ssl.SSLSocket) and fd.pending() > 0: + return True + sel = _selector_class() + events = 0 + if readable: + events |= selectors.EVENT_READ + if writable: + events |= selectors.EVENT_WRITE + if events: + sel.register(fd, events) + if expiration is None: + timeout = None + else: + timeout = expiration - time.time() + if timeout <= 0.0: + raise dns.exception.Timeout + if not sel.select(timeout): + raise dns.exception.Timeout + + +def _set_selector_class(selector_class): + # Internal API. Do not use. + + global _selector_class + + _selector_class = selector_class + +if hasattr(selectors, 'PollSelector'): + # Prefer poll() on platforms that support it because it has no + # limits on the maximum value of a file descriptor (plus it will + # be more efficient for high values). 
+    _selector_class = selectors.PollSelector
+else:
+    _selector_class = selectors.SelectSelector  # pragma: no cover
+
+
+def _wait_for_readable(s, expiration):
+    _wait_for(s, True, False, True, expiration)
+
+
+def _wait_for_writable(s, expiration):
+    _wait_for(s, False, True, True, expiration)
+
+
+def _addresses_equal(af, a1, a2):
+    # Convert the first value of the tuple, which is a textual format
+    # address into binary form, so that we are not confused by different
+    # textual representations of the same address
+    try:
+        n1 = dns.inet.inet_pton(af, a1[0])
+        n2 = dns.inet.inet_pton(af, a2[0])
+    except dns.exception.SyntaxError:
+        return False
+    return n1 == n2 and a1[1:] == a2[1:]
+
+
+def _matches_destination(af, from_address, destination, ignore_unexpected):
+    # Check that from_address is appropriate for a response to a query
+    # sent to destination.
+    if not destination:
+        return True
+    if _addresses_equal(af, from_address, destination) or \
+       (dns.inet.is_multicast(destination[0]) and
+        from_address[1:] == destination[1:]):
+        return True
+    elif ignore_unexpected:
+        return False
+    raise UnexpectedSource(f'got a response from {from_address} instead of '
+                           f'{destination}')
+
+
+def _destination_and_source(where, port, source, source_port,
+                            where_must_be_address=True):
+    # Apply defaults and compute destination and source tuples
+    # suitable for use in connect(), sendto(), or bind().
+    af = None
+    destination = None
+    try:
+        af = dns.inet.af_for_address(where)
+        destination = where
+    except Exception:
+        if where_must_be_address:
+            raise
+        # URLs are ok so eat the exception
+    if source:
+        saf = dns.inet.af_for_address(source)
+        if af:
+            # We know the destination af, so source had better agree!
+            if saf != af:
+                raise ValueError('different address families for source ' +
+                                 'and destination')
+        else:
+            # We didn't know the destination af, but we know the source,
+            # so that's our af.
+            af = saf
+    if source_port and not source:
+        # Caller has specified a source_port but not an address, so we
+        # need to return a source, and we need to use the appropriate
+        # wildcard address as the address.
+        if af == socket.AF_INET:
+            source = '0.0.0.0'
+        elif af == socket.AF_INET6:
+            source = '::'
+        else:
+            raise ValueError('source_port specified but address family is '
+                             'unknown')
+    # Convert high-level (address, port) tuples into low-level address
+    # tuples.
+    if destination:
+        destination = dns.inet.low_level_address_tuple((destination, port), af)
+    if source:
+        source = dns.inet.low_level_address_tuple((source, source_port), af)
+    return (af, destination, source)
+
+def _make_socket(af, type, source, ssl_context=None, server_hostname=None):
+    s = socket_factory(af, type)
+    try:
+        s.setblocking(False)
+        if source is not None:
+            s.bind(source)
+        if ssl_context:
+            return ssl_context.wrap_socket(s, do_handshake_on_connect=False,
+                                           server_hostname=server_hostname)
+        else:
+            return s
+    except Exception:
+        s.close()
+        raise
+
+def https(q, where, timeout=None, port=443, source=None, source_port=0,
+          one_rr_per_rrset=False, ignore_trailing=False,
+          session=None, path='/dns-query', post=True,
+          bootstrap_address=None, verify=True):
+    """Return the response obtained after sending a query via DNS-over-HTTPS.
+
+    *q*, a ``dns.message.Message``, the query to send.
+
+    *where*, a ``str``, the nameserver IP address or the full URL. If an IP
+    address is given, the URL will be constructed using the following schema:
+    https://<IP-address>:<port>/<path>.
+ + *timeout*, a ``float`` or ``None``, the number of seconds to + wait before the query times out. If ``None``, the default, wait forever. + + *port*, a ``int``, the port to send the query to. The default is 443. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *session*, an ``httpx.Client`` or ``requests.session.Session``. If + provided, the client/session to use to send the queries. + + *path*, a ``str``. If *where* is an IP address, then *path* will be used to + construct the URL to send the DNS query to. + + *post*, a ``bool``. If ``True``, the default, POST method will be used. + + *bootstrap_address*, a ``str``, the IP address to use to bypass the + system's DNS resolver. + + *verify*, a ``str``, containing a path to a certificate file or directory. + + Returns a ``dns.message.Message``. + """ + + if not have_doh: + raise NoDOH('Neither httpx nor requests is available.') # pragma: no cover + + _httpx_ok = _have_httpx + + wire = q.to_wire() + (af, _, source) = _destination_and_source(where, port, source, source_port, + False) + transport_adapter = None + transport = None + headers = { + "accept": "application/dns-message" + } + if af is not None: + if af == socket.AF_INET: + url = 'https://{}:{}{}'.format(where, port, path) + elif af == socket.AF_INET6: + url = 'https://[{}]:{}{}'.format(where, port, path) + elif bootstrap_address is not None: + _httpx_ok = False + split_url = urllib.parse.urlsplit(where) + headers['Host'] = split_url.hostname + url = where.replace(split_url.hostname, bootstrap_address) + if _have_requests: + transport_adapter = HostHeaderSSLAdapter() + else: + url = where + if source is not None: + # set source port and source address + if _have_httpx: + if source_port == 0: + transport = httpx.HTTPTransport(local_address=source[0]) + else: + _httpx_ok = False + if _have_requests: + transport_adapter = SourceAddressAdapter(source) + + if session: + if _have_httpx: + _is_httpx = isinstance(session, httpx.Client) + else: + _is_httpx = False + if _is_httpx and not _httpx_ok: + raise NoDOH('Session is httpx, but httpx cannot be used for ' + 'the requested operation.') + else: + _is_httpx = _httpx_ok + + if not _httpx_ok and not _have_requests: + raise NoDOH('Cannot use httpx for this operation, and ' + 'requests is not available.') + + with contextlib.ExitStack() as stack: + if not session: + if _is_httpx: + session = stack.enter_context(httpx.Client(http1=True, + http2=_have_http2, + verify=verify, + transport=transport)) + else: + session = stack.enter_context(requests.sessions.Session()) + + if transport_adapter: + session.mount(url, transport_adapter) + + # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH + # GET and POST examples + if post: + headers.update({ + "content-type": "application/dns-message", + "content-length": str(len(wire)) + }) + if _is_httpx: + response = session.post(url, headers=headers, content=wire, + timeout=timeout) + else: + response = session.post(url, headers=headers, data=wire, + timeout=timeout, verify=verify) + else: + wire = base64.urlsafe_b64encode(wire).rstrip(b"=") + if _is_httpx: + wire = wire.decode() # httpx does a repr() if we give it bytes + response = 
session.get(url, headers=headers, + timeout=timeout, + params={"dns": wire}) + else: + response = session.get(url, headers=headers, + timeout=timeout, verify=verify, + params={"dns": wire}) + + # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH + # status codes + if response.status_code < 200 or response.status_code > 299: + raise ValueError('{} responded with status code {}' + '\nResponse body: {}'.format(where, + response.status_code, + response.content)) + r = dns.message.from_wire(response.content, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing) + r.time = response.elapsed + if not q.is_response(r): + raise BadResponse + return r + +def _udp_recv(sock, max_size, expiration): + """Reads a datagram from the socket. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + while True: + try: + return sock.recvfrom(max_size) + except BlockingIOError: + _wait_for_readable(sock, expiration) + + +def _udp_send(sock, data, destination, expiration): + """Sends the specified datagram to destination over the socket. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + while True: + try: + if destination: + return sock.sendto(data, destination) + else: + return sock.send(data) + except BlockingIOError: # pragma: no cover + _wait_for_writable(sock, expiration) + + +def send_udp(sock, what, destination, expiration=None): + """Send a DNS message to the specified UDP socket. + + *sock*, a ``socket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where to send the query. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. + """ + + if isinstance(what, dns.message.Message): + what = what.to_wire() + sent_time = time.time() + n = _udp_send(sock, what, destination, expiration) + return (n, sent_time) + + +def receive_udp(sock, destination=None, expiration=None, + ignore_unexpected=False, one_rr_per_rrset=False, + keyring=None, request_mac=b'', ignore_trailing=False, + raise_on_truncation=False): + """Read a DNS message from a UDP socket. + + *sock*, a ``socket``. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where the message is expected to arrive from. + When receiving a response, this would be where the associated query was + sent. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from + unexpected sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *request_mac*, a ``bytes``, the MAC of the request (for TSIG). + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if + the TC bit is set. + + Raises if the message is malformed, if network errors occur, of if + there is a timeout. 
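A minimal sketch of a DNS-over-HTTPS lookup through ``https()`` above, assuming ``httpx`` or ``requests`` is installed; the resolver URL is illustrative only:

import dns.message
import dns.query
import dns.rdatatype

q = dns.message.make_query('dnspython.org', dns.rdatatype.A)
r = dns.query.https(q, 'https://cloudflare-dns.com/dns-query', timeout=5)
for rrset in r.answer:
    print(rrset)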
+ + If *destination* is not ``None``, returns a ``(dns.message.Message, float)`` + tuple of the received message and the received time. + + If *destination* is ``None``, returns a + ``(dns.message.Message, float, tuple)`` + tuple of the received message, the received time, and the address where + the message arrived from. + """ + + wire = b'' + while True: + (wire, from_address) = _udp_recv(sock, 65535, expiration) + if _matches_destination(sock.family, from_address, destination, + ignore_unexpected): + break + received_time = time.time() + r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + raise_on_truncation=raise_on_truncation) + if destination: + return (r, received_time) + else: + return (r, received_time, from_address) + +def udp(q, where, timeout=None, port=53, source=None, source_port=0, + ignore_unexpected=False, one_rr_per_rrset=False, ignore_trailing=False, + raise_on_truncation=False, sock=None): + """Return the response obtained after sending a query via UDP. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from + unexpected sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if + the TC bit is set. + + *sock*, a ``socket.socket``, or ``None``, the socket to use for the + query. If ``None``, the default, a socket is created. Note that + if a socket is provided, it must be a nonblocking datagram socket, + and the *source* and *source_port* are ignored. + + Returns a ``dns.message.Message``. + """ + + wire = q.to_wire() + (af, destination, source) = _destination_and_source(where, port, + source, source_port) + (begin_time, expiration) = _compute_times(timeout) + with contextlib.ExitStack() as stack: + if sock: + s = sock + else: + s = stack.enter_context(_make_socket(af, socket.SOCK_DGRAM, source)) + send_udp(s, wire, destination, expiration) + (r, received_time) = receive_udp(s, destination, expiration, + ignore_unexpected, one_rr_per_rrset, + q.keyring, q.mac, ignore_trailing, + raise_on_truncation) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + +def udp_with_fallback(q, where, timeout=None, port=53, source=None, + source_port=0, ignore_unexpected=False, + one_rr_per_rrset=False, ignore_trailing=False, + udp_sock=None, tcp_sock=None): + """Return the response to the query, trying UDP first and falling back + to TCP if UDP results in a truncated response. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. 
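A minimal sketch of a plain UDP query with ``udp()`` above; the nameserver address is illustrative only:

import dns.message
import dns.query
import dns.rdatatype

q = dns.message.make_query('dnspython.org', dns.rdatatype.A)
r = dns.query.udp(q, '8.8.8.8', timeout=2)
for rrset in r.answer:
    print(rrset)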
+ + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from + unexpected sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *udp_sock*, a ``socket.socket``, or ``None``, the socket to use for the + UDP query. If ``None``, the default, a socket is created. Note that + if a socket is provided, it must be a nonblocking datagram socket, + and the *source* and *source_port* are ignored for the UDP query. + + *tcp_sock*, a ``socket.socket``, or ``None``, the connected socket to use for the + TCP query. If ``None``, the default, a socket is created. Note that + if a socket is provided, it must be a nonblocking connected stream + socket, and *where*, *source* and *source_port* are ignored for the TCP + query. + + Returns a (``dns.message.Message``, tcp) tuple where tcp is ``True`` + if and only if TCP was used. + """ + try: + response = udp(q, where, timeout, port, source, source_port, + ignore_unexpected, one_rr_per_rrset, + ignore_trailing, True, udp_sock) + return (response, False) + except dns.message.Truncated: + response = tcp(q, where, timeout, port, source, source_port, + one_rr_per_rrset, ignore_trailing, tcp_sock) + return (response, True) + +def _net_read(sock, count, expiration): + """Read the specified number of bytes from sock. Keep trying until we + either get the desired amount, or we hit EOF. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + s = b'' + while count > 0: + try: + n = sock.recv(count) + if n == b'': + raise EOFError + count -= len(n) + s += n + except (BlockingIOError, ssl.SSLWantReadError): + _wait_for_readable(sock, expiration) + except ssl.SSLWantWriteError: # pragma: no cover + _wait_for_writable(sock, expiration) + return s + + +def _net_write(sock, data, expiration): + """Write the specified data to the socket. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + current = 0 + l = len(data) + while current < l: + try: + current += sock.send(data[current:]) + except (BlockingIOError, ssl.SSLWantWriteError): + _wait_for_writable(sock, expiration) + except ssl.SSLWantReadError: # pragma: no cover + _wait_for_readable(sock, expiration) + + +def send_tcp(sock, what, expiration=None): + """Send a DNS message to the specified TCP socket. + + *sock*, a ``socket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. 
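A minimal sketch of the truncation fallback: ``udp_with_fallback()`` above retries over TCP only when the UDP answer comes back with the TC bit set; the nameserver address is illustrative only:

import dns.message
import dns.query
import dns.rdatatype

q = dns.message.make_query('dnspython.org', dns.rdatatype.TXT)
(r, used_tcp) = dns.query.udp_with_fallback(q, '8.8.8.8', timeout=2)
print('fell back to TCP:', used_tcp)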
+ """ + + if isinstance(what, dns.message.Message): + what = what.to_wire() + l = len(what) + # copying the wire into tcpmsg is inefficient, but lets us + # avoid writev() or doing a short write that would get pushed + # onto the net + tcpmsg = struct.pack("!H", l) + what + sent_time = time.time() + _net_write(sock, tcpmsg, expiration) + return (len(tcpmsg), sent_time) + +def receive_tcp(sock, expiration=None, one_rr_per_rrset=False, + keyring=None, request_mac=b'', ignore_trailing=False): + """Read a DNS message from a TCP socket. + + *sock*, a ``socket``. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *request_mac*, a ``bytes``, the MAC of the request (for TSIG). + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + Raises if the message is malformed, if network errors occur, of if + there is a timeout. + + Returns a ``(dns.message.Message, float)`` tuple of the received message + and the received time. + """ + + ldata = _net_read(sock, 2, expiration) + (l,) = struct.unpack("!H", ldata) + wire = _net_read(sock, l, expiration) + received_time = time.time() + r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing) + return (r, received_time) + +def _connect(s, address, expiration): + err = s.connect_ex(address) + if err == 0: + return + if err in (errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY): + _wait_for_writable(s, expiration) + err = s.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) + if err != 0: + raise OSError(err, os.strerror(err)) + + +def tcp(q, where, timeout=None, port=53, source=None, source_port=0, + one_rr_per_rrset=False, ignore_trailing=False, sock=None): + """Return the response obtained after sending a query via TCP. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *sock*, a ``socket.socket``, or ``None``, the connected socket to use for the + query. If ``None``, the default, a socket is created. Note that + if a socket is provided, it must be a nonblocking connected stream + socket, and *where*, *port*, *source* and *source_port* are ignored. + + Returns a ``dns.message.Message``. 
+ """ + + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + with contextlib.ExitStack() as stack: + if sock: + s = sock + else: + (af, destination, source) = _destination_and_source(where, port, + source, + source_port) + s = stack.enter_context(_make_socket(af, socket.SOCK_STREAM, + source)) + _connect(s, destination, expiration) + send_tcp(s, wire, expiration) + (r, received_time) = receive_tcp(s, expiration, one_rr_per_rrset, + q.keyring, q.mac, ignore_trailing) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + + +def _tls_handshake(s, expiration): + while True: + try: + s.do_handshake() + return + except ssl.SSLWantReadError: + _wait_for_readable(s, expiration) + except ssl.SSLWantWriteError: # pragma: no cover + _wait_for_writable(s, expiration) + + +def tls(q, where, timeout=None, port=853, source=None, source_port=0, + one_rr_per_rrset=False, ignore_trailing=False, sock=None, + ssl_context=None, server_hostname=None): + """Return the response obtained after sending a query via TLS. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 853. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *sock*, an ``ssl.SSLSocket``, or ``None``, the socket to use for + the query. If ``None``, the default, a socket is created. Note + that if a socket is provided, it must be a nonblocking connected + SSL stream socket, and *where*, *port*, *source*, *source_port*, + and *ssl_context* are ignored. + + *ssl_context*, an ``ssl.SSLContext``, the context to use when establishing + a TLS connection. If ``None``, the default, creates one with the default + configuration. + + *server_hostname*, a ``str`` containing the server's hostname. The + default is ``None``, which means that no hostname is known, and if an + SSL context is created, hostname checking will be disabled. + + Returns a ``dns.message.Message``. + + """ + + if sock: + # + # If a socket was provided, there's no special TLS handling needed. 
+ # + return tcp(q, where, timeout, port, source, source_port, + one_rr_per_rrset, ignore_trailing, sock) + + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + (af, destination, source) = _destination_and_source(where, port, + source, source_port) + if ssl_context is None and not sock: + ssl_context = ssl.create_default_context() + if server_hostname is None: + ssl_context.check_hostname = False + + with _make_socket(af, socket.SOCK_STREAM, source, ssl_context=ssl_context, + server_hostname=server_hostname) as s: + _connect(s, destination, expiration) + _tls_handshake(s, expiration) + send_tcp(s, wire, expiration) + (r, received_time) = receive_tcp(s, expiration, one_rr_per_rrset, + q.keyring, q.mac, ignore_trailing) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + + +def xfr(where, zone, rdtype=dns.rdatatype.AXFR, rdclass=dns.rdataclass.IN, + timeout=None, port=53, keyring=None, keyname=None, relativize=True, + lifetime=None, source=None, source_port=0, serial=0, + use_udp=False, keyalgorithm=dns.tsig.default_algorithm): + """Return a generator for the responses to a zone transfer. + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *zone*, a ``dns.name.Name`` or ``str``, the name of the zone to transfer. + + *rdtype*, an ``int`` or ``str``, the type of zone transfer. The + default is ``dns.rdatatype.AXFR``. ``dns.rdatatype.IXFR`` can be + used to do an incremental transfer instead. + + *rdclass*, an ``int`` or ``str``, the class of the zone transfer. + The default is ``dns.rdataclass.IN``. + + *timeout*, a ``float``, the number of seconds to wait for each + response message. If None, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *keyname*, a ``dns.name.Name`` or ``str``, the name of the TSIG + key to use. + + *relativize*, a ``bool``. If ``True``, all names in the zone will be + relativized to the zone origin. It is essential that the + relativize setting matches the one specified to + ``dns.zone.from_xfr()`` if using this generator to make a zone. + + *lifetime*, a ``float``, the total number of seconds to spend + doing the transfer. If ``None``, the default, then there is no + limit on the time the transfer may take. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *serial*, an ``int``, the SOA serial number to use as the base for + an IXFR diff sequence (only meaningful if *rdtype* is + ``dns.rdatatype.IXFR``). + + *use_udp*, a ``bool``. If ``True``, use UDP (only meaningful for IXFR). + + *keyalgorithm*, a ``dns.name.Name`` or ``str``, the TSIG algorithm to use. + + Raises on errors, and so does the generator. + + Returns a generator of ``dns.message.Message`` objects. + """ + + if isinstance(zone, str): + zone = dns.name.from_text(zone) + rdtype = dns.rdatatype.RdataType.make(rdtype) + q = dns.message.make_query(zone, rdtype, rdclass) + if rdtype == dns.rdatatype.IXFR: + rrset = dns.rrset.from_text(zone, 0, 'IN', 'SOA', + '. . 
%u 0 0 0 0' % serial) + q.authority.append(rrset) + if keyring is not None: + q.use_tsig(keyring, keyname, algorithm=keyalgorithm) + wire = q.to_wire() + (af, destination, source) = _destination_and_source(where, port, + source, source_port) + if use_udp and rdtype != dns.rdatatype.IXFR: + raise ValueError('cannot do a UDP AXFR') + sock_type = socket.SOCK_DGRAM if use_udp else socket.SOCK_STREAM + with _make_socket(af, sock_type, source) as s: + (_, expiration) = _compute_times(lifetime) + _connect(s, destination, expiration) + l = len(wire) + if use_udp: + _udp_send(s, wire, None, expiration) + else: + tcpmsg = struct.pack("!H", l) + wire + _net_write(s, tcpmsg, expiration) + done = False + delete_mode = True + expecting_SOA = False + soa_rrset = None + if relativize: + origin = zone + oname = dns.name.empty + else: + origin = None + oname = zone + tsig_ctx = None + while not done: + (_, mexpiration) = _compute_times(timeout) + if mexpiration is None or \ + (expiration is not None and mexpiration > expiration): + mexpiration = expiration + if use_udp: + (wire, _) = _udp_recv(s, 65535, mexpiration) + else: + ldata = _net_read(s, 2, mexpiration) + (l,) = struct.unpack("!H", ldata) + wire = _net_read(s, l, mexpiration) + is_ixfr = (rdtype == dns.rdatatype.IXFR) + r = dns.message.from_wire(wire, keyring=q.keyring, + request_mac=q.mac, xfr=True, + origin=origin, tsig_ctx=tsig_ctx, + multi=True, one_rr_per_rrset=is_ixfr) + rcode = r.rcode() + if rcode != dns.rcode.NOERROR: + raise TransferError(rcode) + tsig_ctx = r.tsig_ctx + answer_index = 0 + if soa_rrset is None: + if not r.answer or r.answer[0].name != oname: + raise dns.exception.FormError( + "No answer or RRset not for qname") + rrset = r.answer[0] + if rrset.rdtype != dns.rdatatype.SOA: + raise dns.exception.FormError("first RRset is not an SOA") + answer_index = 1 + soa_rrset = rrset.copy() + if rdtype == dns.rdatatype.IXFR: + if dns.serial.Serial(soa_rrset[0].serial) <= serial: + # + # We're already up-to-date. + # + done = True + else: + expecting_SOA = True + # + # Process SOAs in the answer section (other than the initial + # SOA in the first message). + # + for rrset in r.answer[answer_index:]: + if done: + raise dns.exception.FormError("answers after final SOA") + if rrset.rdtype == dns.rdatatype.SOA and rrset.name == oname: + if expecting_SOA: + if rrset[0].serial != serial: + raise dns.exception.FormError( + "IXFR base serial mismatch") + expecting_SOA = False + elif rdtype == dns.rdatatype.IXFR: + delete_mode = not delete_mode + # + # If this SOA RRset is equal to the first we saw then we're + # finished. If this is an IXFR we also check that we're + # seeing the record in the expected part of the response. + # + if rrset == soa_rrset and \ + (rdtype == dns.rdatatype.AXFR or + (rdtype == dns.rdatatype.IXFR and delete_mode)): + done = True + elif expecting_SOA: + # + # We made an IXFR request and are expecting another + # SOA RR, but saw something else, so this must be an + # AXFR response. + # + rdtype = dns.rdatatype.AXFR + expecting_SOA = False + if done and q.keyring and not r.had_tsig: + raise dns.exception.FormError("missing TSIG") + yield r + + +class UDPMode(enum.IntEnum): + """How should UDP be used in an IXFR from :py:func:`inbound_xfr()`? 
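A minimal sketch of consuming the ``xfr()`` generator above via ``dns.zone.from_xfr()``; the server address and zone are illustrative, and the server must permit AXFR from the client:

import dns.query
import dns.zone

zone = dns.zone.from_xfr(dns.query.xfr('10.0.0.1', 'example.com'))
for name in sorted(zone.nodes.keys()):
    print(name)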
+ + NEVER means "never use UDP; always use TCP" + TRY_FIRST means "try to use UDP but fall back to TCP if needed" + ONLY means "raise ``dns.xfr.UseTCP`` if trying UDP does not succeed" + """ + NEVER = 0 + TRY_FIRST = 1 + ONLY = 2 + + +def inbound_xfr(where, txn_manager, query=None, + port=53, timeout=None, lifetime=None, source=None, + source_port=0, udp_mode=UDPMode.NEVER): + """Conduct an inbound transfer and apply it via a transaction from the + txn_manager. + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *txn_manager*, a ``dns.transaction.TransactionManager``, the txn_manager + for this transfer (typically a ``dns.zone.Zone``). + + *query*, the query to send. If not supplied, a default query is + constructed using information from the *txn_manager*. + + *port*, an ``int``, the port send the message to. The default is 53. + + *timeout*, a ``float``, the number of seconds to wait for each + response message. If None, the default, wait forever. + + *lifetime*, a ``float``, the total number of seconds to spend + doing the transfer. If ``None``, the default, then there is no + limit on the time the transfer may take. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *udp_mode*, a ``dns.query.UDPMode``, determines how UDP is used + for IXFRs. The default is ``dns.UDPMode.NEVER``, i.e. only use + TCP. Other possibilities are ``dns.UDPMode.TRY_FIRST``, which + means "try UDP but fallback to TCP if needed", and + ``dns.UDPMode.ONLY``, which means "try UDP and raise + ``dns.xfr.UseTCP`` if it does not succeed. + + Raises on errors. + """ + if query is None: + (query, serial) = dns.xfr.make_query(txn_manager) + else: + serial = dns.xfr.extract_serial_from_query(query) + rdtype = query.question[0].rdtype + is_ixfr = rdtype == dns.rdatatype.IXFR + origin = txn_manager.from_wire_origin() + wire = query.to_wire() + (af, destination, source) = _destination_and_source(where, port, + source, source_port) + (_, expiration) = _compute_times(lifetime) + retry = True + while retry: + retry = False + if is_ixfr and udp_mode != UDPMode.NEVER: + sock_type = socket.SOCK_DGRAM + is_udp = True + else: + sock_type = socket.SOCK_STREAM + is_udp = False + with _make_socket(af, sock_type, source) as s: + _connect(s, destination, expiration) + if is_udp: + _udp_send(s, wire, None, expiration) + else: + tcpmsg = struct.pack("!H", len(wire)) + wire + _net_write(s, tcpmsg, expiration) + with dns.xfr.Inbound(txn_manager, rdtype, serial, + is_udp) as inbound: + done = False + tsig_ctx = None + while not done: + (_, mexpiration) = _compute_times(timeout) + if mexpiration is None or \ + (expiration is not None and mexpiration > expiration): + mexpiration = expiration + if is_udp: + (rwire, _) = _udp_recv(s, 65535, mexpiration) + else: + ldata = _net_read(s, 2, mexpiration) + (l,) = struct.unpack("!H", ldata) + rwire = _net_read(s, l, mexpiration) + r = dns.message.from_wire(rwire, keyring=query.keyring, + request_mac=query.mac, xfr=True, + origin=origin, tsig_ctx=tsig_ctx, + multi=(not is_udp), + one_rr_per_rrset=is_ixfr) + try: + done = inbound.process_message(r) + except dns.xfr.UseTCP: + assert is_udp # should not happen if we used TCP! 
+ if udp_mode == UDPMode.ONLY: + raise + done = True + retry = True + udp_mode = UDPMode.NEVER + continue + tsig_ctx = r.tsig_ctx + if not retry and query.keyring and not r.had_tsig: + raise dns.exception.FormError("missing TSIG") diff --git a/venv/lib/python3.10/site-packages/dns/query.pyi b/venv/lib/python3.10/site-packages/dns/query.pyi new file mode 100644 index 0000000..a22e229 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/query.pyi @@ -0,0 +1,64 @@ +from typing import Optional, Union, Dict, Generator, Any +from . import tsig, rdatatype, rdataclass, name, message +from requests.sessions import Session + +import socket + +# If the ssl import works, then +# +# error: Name 'ssl' already defined (by an import) +# +# is expected and can be ignored. +try: + import ssl +except ImportError: + class ssl: # type: ignore + SSLContext : Dict = {} + +have_doh: bool + +def https(q : message.Message, where: str, timeout : Optional[float] = None, + port : Optional[int] = 443, source : Optional[str] = None, + source_port : Optional[int] = 0, + session: Optional[Session] = None, + path : Optional[str] = '/dns-query', post : Optional[bool] = True, + bootstrap_address : Optional[str] = None, + verify : Optional[bool] = True) -> message.Message: + pass + +def tcp(q : message.Message, where : str, timeout : float = None, port=53, + af : Optional[int] = None, source : Optional[str] = None, + source_port : Optional[int] = 0, + one_rr_per_rrset : Optional[bool] = False, + ignore_trailing : Optional[bool] = False, + sock : Optional[socket.socket] = None) -> message.Message: + pass + +def xfr(where : None, zone : Union[name.Name,str], rdtype=rdatatype.AXFR, + rdclass=rdataclass.IN, + timeout : Optional[float] = None, port=53, + keyring : Optional[Dict[name.Name, bytes]] = None, + keyname : Union[str,name.Name]= None, relativize=True, + lifetime : Optional[float] = None, + source : Optional[str] = None, source_port=0, serial=0, + use_udp : Optional[bool] = False, + keyalgorithm=tsig.default_algorithm) \ + -> Generator[Any,Any,message.Message]: + pass + +def udp(q : message.Message, where : str, timeout : Optional[float] = None, + port=53, source : Optional[str] = None, source_port : Optional[int] = 0, + ignore_unexpected : Optional[bool] = False, + one_rr_per_rrset : Optional[bool] = False, + ignore_trailing : Optional[bool] = False, + sock : Optional[socket.socket] = None) -> message.Message: + pass + +def tls(q : message.Message, where : str, timeout : Optional[float] = None, + port=53, source : Optional[str] = None, source_port : Optional[int] = 0, + one_rr_per_rrset : Optional[bool] = False, + ignore_trailing : Optional[bool] = False, + sock : Optional[socket.socket] = None, + ssl_context: Optional[ssl.SSLContext] = None, + server_hostname: Optional[str] = None) -> message.Message: + pass diff --git a/venv/lib/python3.10/site-packages/dns/rcode.py b/venv/lib/python3.10/site-packages/dns/rcode.py new file mode 100644 index 0000000..49fee69 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rcode.py @@ -0,0 +1,164 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
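The inbound_xfr() API above pairs with any dns.transaction.TransactionManager; per the dnspython 2.1 documentation, an empty dns.zone.Zone fills that role, so populating a zone is a two-liner (the address and origin are placeholders):

import dns.query
import dns.zone

# With an empty zone the generated query is an AXFR; for an IXFR,
# pass an explicit query plus a udp_mode such as UDPMode.TRY_FIRST.
zone = dns.zone.Zone('example.com.')
dns.query.inbound_xfr('192.0.2.1', zone)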
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Result Codes.""" + +import dns.enum +import dns.exception + +class Rcode(dns.enum.IntEnum): + #: No error + NOERROR = 0 + #: Format error + FORMERR = 1 + #: Server failure + SERVFAIL = 2 + #: Name does not exist ("Name Error" in RFC 1025 terminology). + NXDOMAIN = 3 + #: Not implemented + NOTIMP = 4 + #: Refused + REFUSED = 5 + #: Name exists. + YXDOMAIN = 6 + #: RRset exists. + YXRRSET = 7 + #: RRset does not exist. + NXRRSET = 8 + #: Not authoritative. + NOTAUTH = 9 + #: Name not in zone. + NOTZONE = 10 + #: DSO-TYPE Not Implemented + DSOTYPENI = 11 + #: Bad EDNS version. + BADVERS = 16 + #: TSIG Signature Failure + BADSIG = 16 + #: Key not recognized. + BADKEY = 17 + #: Signature out of time window. + BADTIME = 18 + #: Bad TKEY Mode. + BADMODE = 19 + #: Duplicate key name. + BADNAME = 20 + #: Algorithm not supported. + BADALG = 21 + #: Bad Truncation + BADTRUNC = 22 + #: Bad/missing Server Cookie + BADCOOKIE = 23 + + @classmethod + def _maximum(cls): + return 4095 + + @classmethod + def _unknown_exception_class(cls): + return UnknownRcode + + +class UnknownRcode(dns.exception.DNSException): + """A DNS rcode is unknown.""" + + +def from_text(text): + """Convert text into an rcode. + + *text*, a ``str``, the textual rcode or an integer in textual form. + + Raises ``dns.rcode.UnknownRcode`` if the rcode mnemonic is unknown. + + Returns an ``int``. + """ + + return Rcode.from_text(text) + + +def from_flags(flags, ednsflags): + """Return the rcode value encoded by flags and ednsflags. + + *flags*, an ``int``, the DNS flags field. + + *ednsflags*, an ``int``, the EDNS flags field. + + Raises ``ValueError`` if rcode is < 0 or > 4095 + + Returns an ``int``. + """ + + value = (flags & 0x000f) | ((ednsflags >> 20) & 0xff0) + return value + + +def to_flags(value): + """Return a (flags, ednsflags) tuple which encodes the rcode. + + *value*, an ``int``, the rcode. + + Raises ``ValueError`` if rcode is < 0 or > 4095. + + Returns an ``(int, int)`` tuple. + """ + + if value < 0 or value > 4095: + raise ValueError('rcode must be >= 0 and <= 4095') + v = value & 0xf + ev = (value & 0xff0) << 20 + return (v, ev) + + +def to_text(value, tsig=False): + """Convert rcode into text. + + *value*, an ``int``, the rcode. + + Raises ``ValueError`` if rcode is < 0 or > 4095. + + Returns a ``str``. 
+ """ + + if tsig and value == Rcode.BADVERS: + return 'BADSIG' + return Rcode.to_text(value) + +### BEGIN generated Rcode constants + +NOERROR = Rcode.NOERROR +FORMERR = Rcode.FORMERR +SERVFAIL = Rcode.SERVFAIL +NXDOMAIN = Rcode.NXDOMAIN +NOTIMP = Rcode.NOTIMP +REFUSED = Rcode.REFUSED +YXDOMAIN = Rcode.YXDOMAIN +YXRRSET = Rcode.YXRRSET +NXRRSET = Rcode.NXRRSET +NOTAUTH = Rcode.NOTAUTH +NOTZONE = Rcode.NOTZONE +DSOTYPENI = Rcode.DSOTYPENI +BADVERS = Rcode.BADVERS +BADSIG = Rcode.BADSIG +BADKEY = Rcode.BADKEY +BADTIME = Rcode.BADTIME +BADMODE = Rcode.BADMODE +BADNAME = Rcode.BADNAME +BADALG = Rcode.BADALG +BADTRUNC = Rcode.BADTRUNC +BADCOOKIE = Rcode.BADCOOKIE + +### END generated Rcode constants diff --git a/venv/lib/python3.10/site-packages/dns/rdata.py b/venv/lib/python3.10/site-packages/dns/rdata.py new file mode 100644 index 0000000..6b5b5c5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdata.py @@ -0,0 +1,782 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS rdata.""" + +from importlib import import_module +import base64 +import binascii +import io +import inspect +import itertools +import random + +import dns.wire +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdataclass +import dns.rdatatype +import dns.tokenizer +import dns.ttl + +_chunksize = 32 + +# We currently allow comparisons for rdata with relative names for backwards +# compatibility, but in the future we will not, as these kinds of comparisons +# can lead to subtle bugs if code is not carefully written. +# +# This switch allows the future behavior to be turned on so code can be +# tested with it. +_allow_relative_comparisons = True + + +class NoRelativeRdataOrdering(dns.exception.DNSException): + """An attempt was made to do an ordered comparison of one or more + rdata with relative names. The only reliable way of sorting rdata + is to use non-relativized rdata. + + """ + + +def _wordbreak(data, chunksize=_chunksize, separator=b' '): + """Break a binary string into chunks of chunksize characters separated by + a space. + """ + + if not chunksize: + return data.decode() + return separator.join([data[i:i + chunksize] + for i + in range(0, len(data), chunksize)]).decode() + + +# pylint: disable=unused-argument + +def _hexify(data, chunksize=_chunksize, separator=b' ', **kw): + """Convert a binary string into its hex encoding, broken up into chunks + of chunksize characters separated by a separator. 
+ """ + + return _wordbreak(binascii.hexlify(data), chunksize, separator) + + +def _base64ify(data, chunksize=_chunksize, separator=b' ', **kw): + """Convert a binary string into its base64 encoding, broken up into chunks + of chunksize characters separated by a separator. + """ + + return _wordbreak(base64.b64encode(data), chunksize, separator) + +# pylint: enable=unused-argument + +__escaped = b'"\\' + +def _escapify(qstring): + """Escape the characters in a quoted string which need it.""" + + if isinstance(qstring, str): + qstring = qstring.encode() + if not isinstance(qstring, bytearray): + qstring = bytearray(qstring) + + text = '' + for c in qstring: + if c in __escaped: + text += '\\' + chr(c) + elif c >= 0x20 and c < 0x7F: + text += chr(c) + else: + text += '\\%03d' % c + return text + + +def _truncate_bitmap(what): + """Determine the index of greatest byte that isn't all zeros, and + return the bitmap that contains all the bytes less than that index. + """ + + for i in range(len(what) - 1, -1, -1): + if what[i] != 0: + return what[0: i + 1] + return what[0:1] + +# So we don't have to edit all the rdata classes... +_constify = dns.immutable.constify + + +@dns.immutable.immutable +class Rdata: + """Base class for all DNS rdata types.""" + + __slots__ = ['rdclass', 'rdtype', 'rdcomment'] + + def __init__(self, rdclass, rdtype): + """Initialize an rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. + """ + + self.rdclass = self._as_rdataclass(rdclass) + self.rdtype = self._as_rdatatype(rdtype) + self.rdcomment = None + + def _get_all_slots(self): + return itertools.chain.from_iterable(getattr(cls, '__slots__', []) + for cls in self.__class__.__mro__) + + def __getstate__(self): + # We used to try to do a tuple of all slots here, but it + # doesn't work as self._all_slots isn't available at + # __setstate__() time. Before that we tried to store a tuple + # of __slots__, but that didn't work as it didn't store the + # slots defined by ancestors. This older way didn't fail + # outright, but ended up with partially broken objects, e.g. + # if you unpickled an A RR it wouldn't have rdclass and rdtype + # attributes, and would compare badly. + state = {} + for slot in self._get_all_slots(): + state[slot] = getattr(self, slot) + return state + + def __setstate__(self, state): + for slot, val in state.items(): + object.__setattr__(self, slot, val) + if not hasattr(self, 'rdcomment'): + # Pickled rdata from 2.0.x might not have a rdcomment, so add + # it if needed. + object.__setattr__(self, 'rdcomment', None) + + def covers(self): + """Return the type a Rdata covers. + + DNS SIG/RRSIG rdatas apply to a specific type; this type is + returned by the covers() function. If the rdata type is not + SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when + creating rdatasets, allowing the rdataset to contain only RRSIGs + of a particular type, e.g. RRSIG(NS). + + Returns an ``int``. + """ + + return dns.rdatatype.NONE + + def extended_rdatatype(self): + """Return a 32-bit type value, the least significant 16 bits of + which are the ordinary DNS type, and the upper 16 bits of which are + the "covered" type, if any. + + Returns an ``int``. + """ + + return self.covers() << 16 | self.rdtype + + def to_text(self, origin=None, relativize=True, **kw): + """Convert an rdata to text format. + + Returns a ``str``. 
+ """ + + raise NotImplementedError # pragma: no cover + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + raise NotImplementedError # pragma: no cover + + def to_wire(self, file=None, compress=None, origin=None, + canonicalize=False): + """Convert an rdata to wire format. + + Returns a ``bytes`` or ``None``. + """ + + if file: + return self._to_wire(file, compress, origin, canonicalize) + else: + f = io.BytesIO() + self._to_wire(f, compress, origin, canonicalize) + return f.getvalue() + + def to_generic(self, origin=None): + """Creates a dns.rdata.GenericRdata equivalent of this rdata. + + Returns a ``dns.rdata.GenericRdata``. + """ + return dns.rdata.GenericRdata(self.rdclass, self.rdtype, + self.to_wire(origin=origin)) + + def to_digestable(self, origin=None): + """Convert rdata to a format suitable for digesting in hashes. This + is also the DNSSEC canonical form. + + Returns a ``bytes``. + """ + + return self.to_wire(origin=origin, canonicalize=True) + + def __repr__(self): + covers = self.covers() + if covers == dns.rdatatype.NONE: + ctext = '' + else: + ctext = '(' + dns.rdatatype.to_text(covers) + ')' + return '' + + def __str__(self): + return self.to_text() + + def _cmp(self, other): + """Compare an rdata with another rdata of the same rdtype and + rdclass. + + For rdata with only absolute names: + Return < 0 if self < other in the DNSSEC ordering, 0 if self + == other, and > 0 if self > other. + For rdata with at least one relative names: + The rdata sorts before any rdata with only absolute names. + When compared with another relative rdata, all names are + made absolute as if they were relative to the root, as the + proper origin is not available. While this creates a stable + ordering, it is NOT guaranteed to be the DNSSEC ordering. + In the future, all ordering comparisons for rdata with + relative names will be disallowed. + """ + try: + our = self.to_digestable() + our_relative = False + except dns.name.NeedAbsoluteNameOrOrigin: + if _allow_relative_comparisons: + our = self.to_digestable(dns.name.root) + our_relative = True + try: + their = other.to_digestable() + their_relative = False + except dns.name.NeedAbsoluteNameOrOrigin: + if _allow_relative_comparisons: + their = other.to_digestable(dns.name.root) + their_relative = True + if _allow_relative_comparisons: + if our_relative != their_relative: + # For the purpose of comparison, all rdata with at least one + # relative name is less than an rdata with only absolute names. 
+ if our_relative: + return -1 + else: + return 1 + elif our_relative or their_relative: + raise NoRelativeRdataOrdering + if our == their: + return 0 + elif our > their: + return 1 + else: + return -1 + + def __eq__(self, other): + if not isinstance(other, Rdata): + return False + if self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return False + our_relative = False + their_relative = False + try: + our = self.to_digestable() + except dns.name.NeedAbsoluteNameOrOrigin: + our = self.to_digestable(dns.name.root) + our_relative = True + try: + their = other.to_digestable() + except dns.name.NeedAbsoluteNameOrOrigin: + their = other.to_digestable(dns.name.root) + their_relative = True + if our_relative != their_relative: + return False + return our == their + + def __ne__(self, other): + if not isinstance(other, Rdata): + return True + if self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return True + return not self.__eq__(other) + + def __lt__(self, other): + if not isinstance(other, Rdata) or \ + self.rdclass != other.rdclass or self.rdtype != other.rdtype: + + return NotImplemented + return self._cmp(other) < 0 + + def __le__(self, other): + if not isinstance(other, Rdata) or \ + self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return NotImplemented + return self._cmp(other) <= 0 + + def __ge__(self, other): + if not isinstance(other, Rdata) or \ + self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return NotImplemented + return self._cmp(other) >= 0 + + def __gt__(self, other): + if not isinstance(other, Rdata) or \ + self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return NotImplemented + return self._cmp(other) > 0 + + def __hash__(self): + return hash(self.to_digestable(dns.name.root)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + raise NotImplementedError # pragma: no cover + + def replace(self, **kwargs): + """ + Create a new Rdata instance based on the instance replace was + invoked on. It is possible to pass different parameters to + override the corresponding properties of the base Rdata. + + Any field specific to the Rdata type can be replaced, but the + *rdtype* and *rdclass* fields cannot. + + Returns an instance of the same Rdata subclass as *self*. + """ + + # Get the constructor parameters. + parameters = inspect.signature(self.__init__).parameters + + # Ensure that all of the arguments correspond to valid fields. + # Don't allow rdclass or rdtype to be changed, though. + for key in kwargs: + if key == 'rdcomment': + continue + if key not in parameters: + raise AttributeError("'{}' object has no attribute '{}'" + .format(self.__class__.__name__, key)) + if key in ('rdclass', 'rdtype'): + raise AttributeError("Cannot overwrite '{}' attribute '{}'" + .format(self.__class__.__name__, key)) + + # Construct the parameter list. For each field, use the value in + # kwargs if present, and the current value otherwise. + args = (kwargs.get(key, getattr(self, key)) for key in parameters) + + # Create, validate, and return the new object. + rd = self.__class__(*args) + # The comment is not set in the constructor, so give it special + # handling. 
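Since _cmp() above compares the DNSSEC canonical (digestable) forms, rdatas of the same class and type order deterministically whenever their names are absolute:

import dns.rdata

a1 = dns.rdata.from_text('IN', 'A', '10.0.0.1')
a2 = dns.rdata.from_text('IN', 'A', '10.0.0.2')
assert a1 < a2                      # wire-form (DNSSEC) ordering
assert a1 == dns.rdata.from_text('IN', 'A', '10.0.0.1')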
+ rdcomment = kwargs.get('rdcomment', self.rdcomment) + if rdcomment is not None: + object.__setattr__(rd, 'rdcomment', rdcomment) + return rd + + # Type checking and conversion helpers. These are class methods as + # they don't touch object state and may be useful to others. + + @classmethod + def _as_rdataclass(cls, value): + return dns.rdataclass.RdataClass.make(value) + + @classmethod + def _as_rdatatype(cls, value): + return dns.rdatatype.RdataType.make(value) + + @classmethod + def _as_bytes(cls, value, encode=False, max_length=None, empty_ok=True): + if encode and isinstance(value, str): + value = value.encode() + elif isinstance(value, bytearray): + value = bytes(value) + elif not isinstance(value, bytes): + raise ValueError('not bytes') + if max_length is not None and len(value) > max_length: + raise ValueError('too long') + if not empty_ok and len(value) == 0: + raise ValueError('empty bytes not allowed') + return value + + @classmethod + def _as_name(cls, value): + # Note that proper name conversion (e.g. with origin and IDNA + # awareness) is expected to be done via from_text. This is just + # a simple thing for people invoking the constructor directly. + if isinstance(value, str): + return dns.name.from_text(value) + elif not isinstance(value, dns.name.Name): + raise ValueError('not a name') + return value + + @classmethod + def _as_uint8(cls, value): + if not isinstance(value, int): + raise ValueError('not an integer') + if value < 0 or value > 255: + raise ValueError('not a uint8') + return value + + @classmethod + def _as_uint16(cls, value): + if not isinstance(value, int): + raise ValueError('not an integer') + if value < 0 or value > 65535: + raise ValueError('not a uint16') + return value + + @classmethod + def _as_uint32(cls, value): + if not isinstance(value, int): + raise ValueError('not an integer') + if value < 0 or value > 4294967295: + raise ValueError('not a uint32') + return value + + @classmethod + def _as_uint48(cls, value): + if not isinstance(value, int): + raise ValueError('not an integer') + if value < 0 or value > 281474976710655: + raise ValueError('not a uint48') + return value + + @classmethod + def _as_int(cls, value, low=None, high=None): + if not isinstance(value, int): + raise ValueError('not an integer') + if low is not None and value < low: + raise ValueError('value too small') + if high is not None and value > high: + raise ValueError('value too large') + return value + + @classmethod + def _as_ipv4_address(cls, value): + if isinstance(value, str): + # call to check validity + dns.ipv4.inet_aton(value) + return value + elif isinstance(value, bytes): + return dns.ipv4.inet_ntoa(value) + else: + raise ValueError('not an IPv4 address') + + @classmethod + def _as_ipv6_address(cls, value): + if isinstance(value, str): + # call to check validity + dns.ipv6.inet_aton(value) + return value + elif isinstance(value, bytes): + return dns.ipv6.inet_ntoa(value) + else: + raise ValueError('not an IPv6 address') + + @classmethod + def _as_bool(cls, value): + if isinstance(value, bool): + return value + else: + raise ValueError('not a boolean') + + @classmethod + def _as_ttl(cls, value): + if isinstance(value, int): + return cls._as_int(value, 0, dns.ttl.MAX_TTL) + elif isinstance(value, str): + return dns.ttl.from_text(value) + else: + raise ValueError('not a TTL') + + @classmethod + def _as_tuple(cls, value, as_value): + try: + # For user convenience, if value is a singleton of the list + # element type, wrap it in a tuple. 
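replace() above produces a copy of an rdata with selected fields overridden, re-running each value through the _as_* validators; a sketch with an MX record:

import dns.rdata

mx = dns.rdata.from_text('IN', 'MX', '10 mail.example.com.')
mx2 = mx.replace(preference=20)      # validated as a uint16
assert (mx.preference, mx2.preference) == (10, 20)
assert mx2.exchange == mx.exchange   # untouched fields carry over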
+ return (as_value(value),) + except Exception: + # Otherwise, check each element of the iterable *value* + # against *as_value*. + return tuple(as_value(v) for v in value) + + # Processing order + + @classmethod + def _processing_order(cls, iterable): + items = list(iterable) + random.shuffle(items) + return items + + +class GenericRdata(Rdata): + + """Generic Rdata Class + + This class is used for rdata types for which we have no better + implementation. It implements the DNS "unknown RRs" scheme. + """ + + __slots__ = ['data'] + + def __init__(self, rdclass, rdtype, data): + super().__init__(rdclass, rdtype) + object.__setattr__(self, 'data', data) + + def to_text(self, origin=None, relativize=True, **kw): + return r'\# %d ' % len(self.data) + _hexify(self.data, **kw) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + token = tok.get() + if not token.is_identifier() or token.value != r'\#': + raise dns.exception.SyntaxError( + r'generic rdata does not start with \#') + length = tok.get_int() + hex = tok.concatenate_remaining_identifiers(True).encode() + data = binascii.unhexlify(hex) + if len(data) != length: + raise dns.exception.SyntaxError( + 'generic rdata hex data has wrong length') + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.data) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + return cls(rdclass, rdtype, parser.get_remaining()) + +_rdata_classes = {} +_module_prefix = 'dns.rdtypes' + +def get_rdata_class(rdclass, rdtype): + cls = _rdata_classes.get((rdclass, rdtype)) + if not cls: + cls = _rdata_classes.get((dns.rdatatype.ANY, rdtype)) + if not cls: + rdclass_text = dns.rdataclass.to_text(rdclass) + rdtype_text = dns.rdatatype.to_text(rdtype) + rdtype_text = rdtype_text.replace('-', '_') + try: + mod = import_module('.'.join([_module_prefix, + rdclass_text, rdtype_text])) + cls = getattr(mod, rdtype_text) + _rdata_classes[(rdclass, rdtype)] = cls + except ImportError: + try: + mod = import_module('.'.join([_module_prefix, + 'ANY', rdtype_text])) + cls = getattr(mod, rdtype_text) + _rdata_classes[(dns.rdataclass.ANY, rdtype)] = cls + _rdata_classes[(rdclass, rdtype)] = cls + except ImportError: + pass + if not cls: + cls = GenericRdata + _rdata_classes[(rdclass, rdtype)] = cls + return cls + + +def from_text(rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None, idna_codec=None): + """Build an rdata object from text format. + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_text() class method is called + with the parameters to this function. + + If *tok* is a ``str``, then a tokenizer is created and the string + is used as its input. + + *rdclass*, an ``int``, the rdataclass. + + *rdtype*, an ``int``, the rdatatype. + + *tok*, a ``dns.tokenizer.Tokenizer`` or a ``str``. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use if a tokenizer needs to be created. 
If + ``None``, the default IDNA 2003 encoder/decoder is used. If a + tokenizer is not created, then the codec associated with the tokenizer + is the one that is used. + + Returns an instance of the chosen Rdata subclass. + + """ + if isinstance(tok, str): + tok = dns.tokenizer.Tokenizer(tok, idna_codec=idna_codec) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + cls = get_rdata_class(rdclass, rdtype) + with dns.exception.ExceptionWrapper(dns.exception.SyntaxError): + rdata = None + if cls != GenericRdata: + # peek at first token + token = tok.get() + tok.unget(token) + if token.is_identifier() and \ + token.value == r'\#': + # + # Known type using the generic syntax. Extract the + # wire form from the generic syntax, and then run + # from_wire on it. + # + grdata = GenericRdata.from_text(rdclass, rdtype, tok, origin, + relativize, relativize_to) + rdata = from_wire(rdclass, rdtype, grdata.data, 0, + len(grdata.data), origin) + # + # If this comparison isn't equal, then there must have been + # compressed names in the wire format, which is an error, + # there being no reasonable context to decompress with. + # + rwire = rdata.to_wire() + if rwire != grdata.data: + raise dns.exception.SyntaxError('compressed data in ' + 'generic syntax form ' + 'of known rdatatype') + if rdata is None: + rdata = cls.from_text(rdclass, rdtype, tok, origin, relativize, + relativize_to) + token = tok.get_eol_as_token() + if token.comment is not None: + object.__setattr__(rdata, 'rdcomment', token.comment) + return rdata + + +def from_wire_parser(rdclass, rdtype, parser, origin=None): + """Build an rdata object from wire format + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_wire() class method is called + with the parameters to this function. + + *rdclass*, an ``int``, the rdataclass. + + *rdtype*, an ``int``, the rdatatype. + + *parser*, a ``dns.wire.Parser``, the parser, which should be + restricted to the rdata length. + + *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, + then names will be relativized to this origin. + + Returns an instance of the chosen Rdata subclass. + """ + + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + cls = get_rdata_class(rdclass, rdtype) + with dns.exception.ExceptionWrapper(dns.exception.FormError): + return cls.from_wire_parser(rdclass, rdtype, parser, origin) + + +def from_wire(rdclass, rdtype, wire, current, rdlen, origin=None): + """Build an rdata object from wire format + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_wire() class method is called + with the parameters to this function. + + *rdclass*, an ``int``, the rdataclass. + + *rdtype*, an ``int``, the rdatatype. + + *wire*, a ``bytes``, the wire-format message. + + *current*, an ``int``, the offset in wire of the beginning of + the rdata. + + *rdlen*, an ``int``, the length of the wire-format rdata + + *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, + then names will be relativized to this origin. + + Returns an instance of the chosen Rdata subclass. 
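The branch above that peeks for the \# token implements the RFC 3597 generic syntax: the rdata is first read as GenericRdata and then re-parsed by the real implementation, so known types normalize. For example:

import dns.rdata

# Four octets of generic hex decode to an ordinary A record.
rd = dns.rdata.from_text('IN', 'A', r'\# 4 0a000001')
assert rd.to_text() == '10.0.0.1'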
+ """ + parser = dns.wire.Parser(wire, current) + with parser.restrict_to(rdlen): + return from_wire_parser(rdclass, rdtype, parser, origin) + + +class RdatatypeExists(dns.exception.DNSException): + """DNS rdatatype already exists.""" + supp_kwargs = {'rdclass', 'rdtype'} + fmt = "The rdata type with class {rdclass:d} and rdtype {rdtype:d} " + \ + "already exists." + + +def register_type(implementation, rdtype, rdtype_text, is_singleton=False, + rdclass=dns.rdataclass.IN): + """Dynamically register a module to handle an rdatatype. + + *implementation*, a module implementing the type in the usual dnspython + way. + + *rdtype*, an ``int``, the rdatatype to register. + + *rdtype_text*, a ``str``, the textual form of the rdatatype. + + *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. + RRsets of the type can have only one member.) + + *rdclass*, the rdataclass of the type, or ``dns.rdataclass.ANY`` if + it applies to all classes. + """ + + existing_cls = get_rdata_class(rdclass, rdtype) + if existing_cls != GenericRdata or dns.rdatatype.is_metatype(rdtype): + raise RdatatypeExists(rdclass=rdclass, rdtype=rdtype) + try: + if dns.rdatatype.RdataType(rdtype).name != rdtype_text: + raise RdatatypeExists(rdclass=rdclass, rdtype=rdtype) + except ValueError: + pass + _rdata_classes[(rdclass, rdtype)] = getattr(implementation, + rdtype_text.replace('-', '_')) + dns.rdatatype.register_type(rdtype, rdtype_text, is_singleton) diff --git a/venv/lib/python3.10/site-packages/dns/rdata.pyi b/venv/lib/python3.10/site-packages/dns/rdata.pyi new file mode 100644 index 0000000..f394791 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdata.pyi @@ -0,0 +1,19 @@ +from typing import Dict, Tuple, Any, Optional, BinaryIO +from .name import Name, IDNACodec +class Rdata: + def __init__(self): + self.address : str + def to_wire(self, file : Optional[BinaryIO], compress : Optional[Dict[Name,int]], origin : Optional[Name], canonicalize : Optional[bool]) -> Optional[bytes]: + ... + @classmethod + def from_text(cls, rdclass : int, rdtype : int, tok, origin=None, relativize=True): + ... +_rdata_modules : Dict[Tuple[Any,Rdata],Any] + +def from_text(rdclass : int, rdtype : int, tok : Optional[str], origin : Optional[Name] = None, + relativize : bool = True, relativize_to : Optional[Name] = None, + idna_codec : Optional[IDNACodec] = None): + ... + +def from_wire(rdclass : int, rdtype : int, wire : bytes, current : int, rdlen : int, origin : Optional[Name] = None): + ... diff --git a/venv/lib/python3.10/site-packages/dns/rdataclass.py b/venv/lib/python3.10/site-packages/dns/rdataclass.py new file mode 100644 index 0000000..41bba69 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdataclass.py @@ -0,0 +1,115 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Rdata Classes.""" + +import dns.enum +import dns.exception + +class RdataClass(dns.enum.IntEnum): + """DNS Rdata Class""" + RESERVED0 = 0 + IN = 1 + INTERNET = IN + CH = 3 + CHAOS = CH + HS = 4 + HESIOD = HS + NONE = 254 + ANY = 255 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "class" + + @classmethod + def _prefix(cls): + return "CLASS" + + @classmethod + def _unknown_exception_class(cls): + return UnknownRdataclass + + +_metaclasses = {RdataClass.NONE, RdataClass.ANY} + + +class UnknownRdataclass(dns.exception.DNSException): + """A DNS class is unknown.""" + + +def from_text(text): + """Convert text into a DNS rdata class value. + + The input text can be a defined DNS RR class mnemonic or + instance of the DNS generic class syntax. + + For example, "IN" and "CLASS1" will both result in a value of 1. + + Raises ``dns.rdatatype.UnknownRdataclass`` if the class is unknown. + + Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. + + Returns an ``int``. + """ + + return RdataClass.from_text(text) + + +def to_text(value): + """Convert a DNS rdata class value to text. + + If the value has a known mnemonic, it will be used, otherwise the + DNS generic class syntax will be used. + + Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. + + Returns a ``str``. + """ + + return RdataClass.to_text(value) + + +def is_metaclass(rdclass): + """True if the specified class is a metaclass. + + The currently defined metaclasses are ANY and NONE. + + *rdclass* is an ``int``. + """ + + if rdclass in _metaclasses: + return True + return False + +### BEGIN generated RdataClass constants + +RESERVED0 = RdataClass.RESERVED0 +IN = RdataClass.IN +INTERNET = RdataClass.INTERNET +CH = RdataClass.CH +CHAOS = RdataClass.CHAOS +HS = RdataClass.HS +HESIOD = RdataClass.HESIOD +NONE = RdataClass.NONE +ANY = RdataClass.ANY + +### END generated RdataClass constants diff --git a/venv/lib/python3.10/site-packages/dns/rdataset.py b/venv/lib/python3.10/site-packages/dns/rdataset.py new file mode 100644 index 0000000..579bc96 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdataset.py @@ -0,0 +1,456 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
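The rdataclass helpers defined above mirror the rcode ones: mnemonics, the CLASS%d generic syntax, and metaclass checks all resolve to the same integers:

import dns.rdataclass

assert dns.rdataclass.from_text('IN') == dns.rdataclass.from_text('CLASS1') == 1
assert dns.rdataclass.to_text(3) == 'CH'
assert dns.rdataclass.is_metaclass(dns.rdataclass.ANY)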
+ +"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)""" + +import io +import random +import struct + +import dns.exception +import dns.immutable +import dns.rdatatype +import dns.rdataclass +import dns.rdata +import dns.set + +# define SimpleSet here for backwards compatibility +SimpleSet = dns.set.Set + + +class DifferingCovers(dns.exception.DNSException): + """An attempt was made to add a DNS SIG/RRSIG whose covered type + is not the same as that of the other rdatas in the rdataset.""" + + +class IncompatibleTypes(dns.exception.DNSException): + """An attempt was made to add DNS RR data of an incompatible type.""" + + +class Rdataset(dns.set.Set): + + """A DNS rdataset.""" + + __slots__ = ['rdclass', 'rdtype', 'covers', 'ttl'] + + def __init__(self, rdclass, rdtype, covers=dns.rdatatype.NONE, ttl=0): + """Create a new rdataset of the specified class and type. + + *rdclass*, an ``int``, the rdataclass. + + *rdtype*, an ``int``, the rdatatype. + + *covers*, an ``int``, the covered rdatatype. + + *ttl*, an ``int``, the TTL. + """ + + super().__init__() + self.rdclass = rdclass + self.rdtype = rdtype + self.covers = covers + self.ttl = ttl + + def _clone(self): + obj = super()._clone() + obj.rdclass = self.rdclass + obj.rdtype = self.rdtype + obj.covers = self.covers + obj.ttl = self.ttl + return obj + + def update_ttl(self, ttl): + """Perform TTL minimization. + + Set the TTL of the rdataset to be the lesser of the set's current + TTL or the specified TTL. If the set contains no rdatas, set the TTL + to the specified TTL. + + *ttl*, an ``int`` or ``str``. + """ + ttl = dns.ttl.make(ttl) + if len(self) == 0: + self.ttl = ttl + elif ttl < self.ttl: + self.ttl = ttl + + def add(self, rd, ttl=None): # pylint: disable=arguments-differ + """Add the specified rdata to the rdataset. + + If the optional *ttl* parameter is supplied, then + ``self.update_ttl(ttl)`` will be called prior to adding the rdata. + + *rd*, a ``dns.rdata.Rdata``, the rdata + + *ttl*, an ``int``, the TTL. + + Raises ``dns.rdataset.IncompatibleTypes`` if the type and class + do not match the type and class of the rdataset. + + Raises ``dns.rdataset.DifferingCovers`` if the type is a signature + type and the covered type does not match that of the rdataset. + """ + + # + # If we're adding a signature, do some special handling to + # check that the signature covers the same type as the + # other rdatas in this rdataset. If this is the first rdata + # in the set, initialize the covers field. + # + if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype: + raise IncompatibleTypes + if ttl is not None: + self.update_ttl(ttl) + if self.rdtype == dns.rdatatype.RRSIG or \ + self.rdtype == dns.rdatatype.SIG: + covers = rd.covers() + if len(self) == 0 and self.covers == dns.rdatatype.NONE: + self.covers = covers + elif self.covers != covers: + raise DifferingCovers + if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0: + self.clear() + super().add(rd) + + def union_update(self, other): + self.update_ttl(other.ttl) + super().union_update(other) + + def intersection_update(self, other): + self.update_ttl(other.ttl) + super().intersection_update(other) + + def update(self, other): + """Add all rdatas in other to self. + + *other*, a ``dns.rdataset.Rdataset``, the rdataset from which + to update. + """ + + self.update_ttl(other.ttl) + super().update(other) + + def _rdata_repr(self): + def maybe_truncate(s): + if len(s) > 100: + return s[:100] + '...' 
+ return s + return '[%s]' % ', '.join('<%s>' % maybe_truncate(str(rr)) + for rr in self) + + def __repr__(self): + if self.covers == 0: + ctext = '' + else: + ctext = '(' + dns.rdatatype.to_text(self.covers) + ')' + return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \ + dns.rdatatype.to_text(self.rdtype) + ctext + ' rdataset: ' + \ + self._rdata_repr() + '>' + + def __str__(self): + return self.to_text() + + def __eq__(self, other): + if not isinstance(other, Rdataset): + return False + if self.rdclass != other.rdclass or \ + self.rdtype != other.rdtype or \ + self.covers != other.covers: + return False + return super().__eq__(other) + + def __ne__(self, other): + return not self.__eq__(other) + + def to_text(self, name=None, origin=None, relativize=True, + override_rdclass=None, want_comments=False, **kw): + """Convert the rdataset into DNS zone file format. + + See ``dns.name.Name.choose_relativity`` for more information + on how *origin* and *relativize* determine the way names + are emitted. + + Any additional keyword arguments are passed on to the rdata + ``to_text()`` method. + + *name*, a ``dns.name.Name``. If name is not ``None``, emit RRs with + *name* as the owner name. + + *origin*, a ``dns.name.Name`` or ``None``, the origin for relative + names. + + *relativize*, a ``bool``. If ``True``, names will be relativized + to *origin*. + + *override_rdclass*, a ``dns.rdataclass.RdataClass`` or ``None``. + If not ``None``, use this class instead of the Rdataset's class. + + *want_comments*, a ``bool``. If ``True``, emit comments for rdata + which have them. The default is ``False``. + """ + + if name is not None: + name = name.choose_relativity(origin, relativize) + ntext = str(name) + pad = ' ' + else: + ntext = '' + pad = '' + s = io.StringIO() + if override_rdclass is not None: + rdclass = override_rdclass + else: + rdclass = self.rdclass + if len(self) == 0: + # + # Empty rdatasets are used for the question section, and in + # some dynamic updates, so we don't need to print out the TTL + # (which is meaningless anyway). + # + s.write('{}{}{} {}\n'.format(ntext, pad, + dns.rdataclass.to_text(rdclass), + dns.rdatatype.to_text(self.rdtype))) + else: + for rd in self: + extra = '' + if want_comments: + if rd.rdcomment: + extra = f' ;{rd.rdcomment}' + s.write('%s%s%d %s %s %s%s\n' % + (ntext, pad, self.ttl, dns.rdataclass.to_text(rdclass), + dns.rdatatype.to_text(self.rdtype), + rd.to_text(origin=origin, relativize=relativize, + **kw), + extra)) + # + # We strip off the final \n for the caller's convenience in printing + # + return s.getvalue()[:-1] + + def to_wire(self, name, file, compress=None, origin=None, + override_rdclass=None, want_shuffle=True): + """Convert the rdataset to wire format. + + *name*, a ``dns.name.Name`` is the owner name to use. + + *file* is the file where the name is emitted (typically a + BytesIO file). + + *compress*, a ``dict``, is the compression table to use. If + ``None`` (the default), names will not be compressed. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then *origin* will be appended + to it. + + *override_rdclass*, an ``int``, is used as the class instead of the + class of the rdataset. This is useful when rendering rdatasets + associated with dynamic updates. + + *want_shuffle*, a ``bool``. If ``True``, then the order of the + Rdatas within the Rdataset will be shuffled before rendering. + + Returns an ``int``, the number of records emitted.
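add() and update_ttl() above give the rdataset TTL-minimization semantics: the set's TTL is the smallest TTL ever supplied. A short check:

import dns.rdata
import dns.rdataclass
import dns.rdataset
import dns.rdatatype

rs = dns.rdataset.Rdataset(dns.rdataclass.IN, dns.rdatatype.A)
rs.add(dns.rdata.from_text('IN', 'A', '10.0.0.1'), ttl=300)
rs.add(dns.rdata.from_text('IN', 'A', '10.0.0.2'), ttl=60)
assert rs.ttl == 60 and len(rs) == 2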
+ """ + + if override_rdclass is not None: + rdclass = override_rdclass + want_shuffle = False + else: + rdclass = self.rdclass + file.seek(0, io.SEEK_END) + if len(self) == 0: + name.to_wire(file, compress, origin) + stuff = struct.pack("!HHIH", self.rdtype, rdclass, 0, 0) + file.write(stuff) + return 1 + else: + if want_shuffle: + l = list(self) + random.shuffle(l) + else: + l = self + for rd in l: + name.to_wire(file, compress, origin) + stuff = struct.pack("!HHIH", self.rdtype, rdclass, + self.ttl, 0) + file.write(stuff) + start = file.tell() + rd.to_wire(file, compress, origin) + end = file.tell() + assert end - start < 65536 + file.seek(start - 2) + stuff = struct.pack("!H", end - start) + file.write(stuff) + file.seek(0, io.SEEK_END) + return len(self) + + def match(self, rdclass, rdtype, covers): + """Returns ``True`` if this rdataset matches the specified class, + type, and covers. + """ + if self.rdclass == rdclass and \ + self.rdtype == rdtype and \ + self.covers == covers: + return True + return False + + def processing_order(self): + """Return rdatas in a valid processing order according to the type's + specification. For example, MX records are in preference order from + lowest to highest preferences, with items of the same preference + shuffled. + + For types that do not define a processing order, the rdatas are + simply shuffled. + """ + if len(self) == 0: + return [] + else: + return self[0]._processing_order(iter(self)) + + +@dns.immutable.immutable +class ImmutableRdataset(Rdataset): + + """An immutable DNS rdataset.""" + + _clone_class = Rdataset + + def __init__(self, rdataset): + """Create an immutable rdataset from the specified rdataset.""" + + super().__init__(rdataset.rdclass, rdataset.rdtype, rdataset.covers, + rdataset.ttl) + self.items = dns.immutable.Dict(rdataset.items) + + def update_ttl(self, ttl): + raise TypeError('immutable') + + def add(self, rd, ttl=None): + raise TypeError('immutable') + + def union_update(self, other): + raise TypeError('immutable') + + def intersection_update(self, other): + raise TypeError('immutable') + + def update(self, other): + raise TypeError('immutable') + + def __delitem__(self, i): + raise TypeError('immutable') + + def __ior__(self, other): + raise TypeError('immutable') + + def __iand__(self, other): + raise TypeError('immutable') + + def __iadd__(self, other): + raise TypeError('immutable') + + def __isub__(self, other): + raise TypeError('immutable') + + def clear(self): + raise TypeError('immutable') + + def __copy__(self): + return ImmutableRdataset(super().copy()) + + def copy(self): + return ImmutableRdataset(super().copy()) + + def union(self, other): + return ImmutableRdataset(super().union(other)) + + def intersection(self, other): + return ImmutableRdataset(super().intersection(other)) + + def difference(self, other): + return ImmutableRdataset(super().difference(other)) + + +def from_text_list(rdclass, rdtype, ttl, text_rdatas, idna_codec=None, + origin=None, relativize=True, relativize_to=None): + """Create an rdataset with the specified class, type, and TTL, and with + the specified list of rdatas in text format. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. 
+ + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + r = Rdataset(rdclass, rdtype) + r.update_ttl(ttl) + for t in text_rdatas: + rd = dns.rdata.from_text(r.rdclass, r.rdtype, t, origin, relativize, + relativize_to, idna_codec) + r.add(rd) + return r + + +def from_text(rdclass, rdtype, ttl, *text_rdatas): + """Create an rdataset with the specified class, type, and TTL, and with + the specified rdatas in text format. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + return from_text_list(rdclass, rdtype, ttl, text_rdatas) + + +def from_rdata_list(ttl, rdatas): + """Create an rdataset with the specified TTL, and with + the specified list of rdata objects. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + if len(rdatas) == 0: + raise ValueError("rdata list must not be empty") + r = None + for rd in rdatas: + if r is None: + r = Rdataset(rd.rdclass, rd.rdtype) + r.update_ttl(ttl) + r.add(rd) + return r + + +def from_rdata(ttl, *rdatas): + """Create an rdataset with the specified TTL, and with + the specified rdata objects. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + return from_rdata_list(ttl, rdatas) diff --git a/venv/lib/python3.10/site-packages/dns/rdataset.pyi b/venv/lib/python3.10/site-packages/dns/rdataset.pyi new file mode 100644 index 0000000..a7bbf2d --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdataset.pyi @@ -0,0 +1,58 @@ +from typing import Optional, Dict, List, Union +from io import BytesIO +from . import exception, name, set, rdatatype, rdata, rdataset + +class DifferingCovers(exception.DNSException): + """An attempt was made to add a DNS SIG/RRSIG whose covered type + is not the same as that of the other rdatas in the rdataset.""" + + +class IncompatibleTypes(exception.DNSException): + """An attempt was made to add DNS RR data of an incompatible type.""" + + +class Rdataset(set.Set): + def __init__(self, rdclass, rdtype, covers=rdatatype.NONE, ttl=0): + self.rdclass : int = rdclass + self.rdtype : int = rdtype + self.covers : int = covers + self.ttl : int = ttl + + def update_ttl(self, ttl : int) -> None: + ... + + def add(self, rd : rdata.Rdata, ttl : Optional[int] =None): + ... + + def union_update(self, other : Rdataset): + ... + + def intersection_update(self, other : Rdataset): + ... + + def update(self, other : Rdataset): + ... + + def to_text(self, name : Optional[name.Name] =None, origin : Optional[name.Name] =None, relativize=True, + override_rdclass : Optional[int] =None, **kw) -> bytes: + ... + + def to_wire(self, name : Optional[name.Name], file : BytesIO, compress : Optional[Dict[name.Name, int]] = None, origin : Optional[name.Name] = None, + override_rdclass : Optional[int] = None, want_shuffle=True) -> int: + ... + + def match(self, rdclass : int, rdtype : int, covers : int) -> bool: + ... + + +def from_text_list(rdclass : Union[int,str], rdtype : Union[int,str], ttl : int, text_rdatas : str, idna_codec : Optional[name.IDNACodec] = None) -> rdataset.Rdataset: + ... + +def from_text(rdclass : Union[int,str], rdtype : Union[int,str], ttl : int, *text_rdatas : str) -> rdataset.Rdataset: + ... + +def from_rdata_list(ttl : int, rdatas : List[rdata.Rdata]) -> rdataset.Rdataset: + ... + +def from_rdata(ttl : int, *rdatas : List[rdata.Rdata]) -> rdataset.Rdataset: + ... 
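The module-level constructors that close out rdataset.py make set construction a one-liner, and processing_order() applies type-specific semantics (MX sorts by preference, shuffling ties):

import dns.rdataset

rs = dns.rdataset.from_text('IN', 'MX', 300, '10 a.example.', '20 b.example.')
print(rs.to_text())            # both records share the 300s TTL
print(rs.processing_order())   # lowest preference first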
diff --git a/venv/lib/python3.10/site-packages/dns/rdatatype.py b/venv/lib/python3.10/site-packages/dns/rdatatype.py new file mode 100644 index 0000000..9499c7b --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdatatype.py @@ -0,0 +1,313 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Rdata Types.""" + +import dns.enum +import dns.exception + +class RdataType(dns.enum.IntEnum): + """DNS Rdata Type""" + TYPE0 = 0 + NONE = 0 + A = 1 + NS = 2 + MD = 3 + MF = 4 + CNAME = 5 + SOA = 6 + MB = 7 + MG = 8 + MR = 9 + NULL = 10 + WKS = 11 + PTR = 12 + HINFO = 13 + MINFO = 14 + MX = 15 + TXT = 16 + RP = 17 + AFSDB = 18 + X25 = 19 + ISDN = 20 + RT = 21 + NSAP = 22 + NSAP_PTR = 23 + SIG = 24 + KEY = 25 + PX = 26 + GPOS = 27 + AAAA = 28 + LOC = 29 + NXT = 30 + SRV = 33 + NAPTR = 35 + KX = 36 + CERT = 37 + A6 = 38 + DNAME = 39 + OPT = 41 + APL = 42 + DS = 43 + SSHFP = 44 + IPSECKEY = 45 + RRSIG = 46 + NSEC = 47 + DNSKEY = 48 + DHCID = 49 + NSEC3 = 50 + NSEC3PARAM = 51 + TLSA = 52 + SMIMEA = 53 + HIP = 55 + NINFO = 56 + CDS = 59 + CDNSKEY = 60 + OPENPGPKEY = 61 + CSYNC = 62 + ZONEMD = 63 + SVCB = 64 + HTTPS = 65 + SPF = 99 + UNSPEC = 103 + NID = 104 + L32 = 105 + L64 = 106 + LP = 107 + EUI48 = 108 + EUI64 = 109 + TKEY = 249 + TSIG = 250 + IXFR = 251 + AXFR = 252 + MAILB = 253 + MAILA = 254 + ANY = 255 + URI = 256 + CAA = 257 + AVC = 258 + AMTRELAY = 260 + TA = 32768 + DLV = 32769 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "type" + + @classmethod + def _prefix(cls): + return "TYPE" + + @classmethod + def _unknown_exception_class(cls): + return UnknownRdatatype + +_registered_by_text = {} +_registered_by_value = {} + +_metatypes = {RdataType.OPT} + +_singletons = {RdataType.SOA, RdataType.NXT, RdataType.DNAME, + RdataType.NSEC, RdataType.CNAME} + + +class UnknownRdatatype(dns.exception.DNSException): + """DNS resource record type is unknown.""" + + +def from_text(text): + """Convert text into a DNS rdata type value. + + The input text can be a defined DNS RR type mnemonic or + instance of the DNS generic type syntax. + + For example, "NS" and "TYPE2" will both result in a value of 2. + + Raises ``dns.rdatatype.UnknownRdatatype`` if the type is unknown. + + Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. + + Returns an ``int``. + """ + + text = text.upper().replace('-', '_') + try: + return RdataType.from_text(text) + except UnknownRdatatype: + registered_type = _registered_by_text.get(text) + if registered_type: + return registered_type + raise + + +def to_text(value): + """Convert a DNS rdata type value to text. 
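The rdatatype helpers being defined here behave like their rdataclass counterparts, with underscores in enum names mapped to hyphens in the mnemonics:

import dns.rdatatype

assert dns.rdatatype.from_text('TYPE2') == dns.rdatatype.NS == 2
assert dns.rdatatype.to_text(dns.rdatatype.NSAP_PTR) == 'NSAP-PTR'
assert dns.rdatatype.is_singleton(dns.rdatatype.CNAME)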
+ + If the value has a known mnemonic, it will be used, otherwise the + DNS generic type syntax will be used. + + Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. + + Returns a ``str``. + """ + + text = RdataType.to_text(value) + if text.startswith("TYPE"): + registered_text = _registered_by_value.get(value) + if registered_text: + text = registered_text + return text.replace('_', '-') + + +def is_metatype(rdtype): + """True if the specified type is a metatype. + + *rdtype* is an ``int``. + + The currently defined metatypes are TKEY, TSIG, IXFR, AXFR, MAILA, + MAILB, ANY, and OPT. + + Returns a ``bool``. + """ + + return (256 > rdtype >= 128) or rdtype in _metatypes + + +def is_singleton(rdtype): + """Is the specified type a singleton type? + + Singleton types can only have a single rdata in an rdataset, or a single + RR in an RRset. + + The currently defined singleton types are CNAME, DNAME, NSEC, NXT, and + SOA. + + *rdtype* is an ``int``. + + Returns a ``bool``. + """ + + if rdtype in _singletons: + return True + return False + +# pylint: disable=redefined-outer-name +def register_type(rdtype, rdtype_text, is_singleton=False): + """Dynamically register an rdatatype. + + *rdtype*, an ``int``, the rdatatype to register. + + *rdtype_text*, a ``str``, the textual form of the rdatatype. + + *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. + RRsets of the type can have only one member.) + """ + + _registered_by_text[rdtype_text] = rdtype + _registered_by_value[rdtype] = rdtype_text + if is_singleton: + _singletons.add(rdtype) + +### BEGIN generated RdataType constants + +TYPE0 = RdataType.TYPE0 +NONE = RdataType.NONE +A = RdataType.A +NS = RdataType.NS +MD = RdataType.MD +MF = RdataType.MF +CNAME = RdataType.CNAME +SOA = RdataType.SOA +MB = RdataType.MB +MG = RdataType.MG +MR = RdataType.MR +NULL = RdataType.NULL +WKS = RdataType.WKS +PTR = RdataType.PTR +HINFO = RdataType.HINFO +MINFO = RdataType.MINFO +MX = RdataType.MX +TXT = RdataType.TXT +RP = RdataType.RP +AFSDB = RdataType.AFSDB +X25 = RdataType.X25 +ISDN = RdataType.ISDN +RT = RdataType.RT +NSAP = RdataType.NSAP +NSAP_PTR = RdataType.NSAP_PTR +SIG = RdataType.SIG +KEY = RdataType.KEY +PX = RdataType.PX +GPOS = RdataType.GPOS +AAAA = RdataType.AAAA +LOC = RdataType.LOC +NXT = RdataType.NXT +SRV = RdataType.SRV +NAPTR = RdataType.NAPTR +KX = RdataType.KX +CERT = RdataType.CERT +A6 = RdataType.A6 +DNAME = RdataType.DNAME +OPT = RdataType.OPT +APL = RdataType.APL +DS = RdataType.DS +SSHFP = RdataType.SSHFP +IPSECKEY = RdataType.IPSECKEY +RRSIG = RdataType.RRSIG +NSEC = RdataType.NSEC +DNSKEY = RdataType.DNSKEY +DHCID = RdataType.DHCID +NSEC3 = RdataType.NSEC3 +NSEC3PARAM = RdataType.NSEC3PARAM +TLSA = RdataType.TLSA +SMIMEA = RdataType.SMIMEA +HIP = RdataType.HIP +NINFO = RdataType.NINFO +CDS = RdataType.CDS +CDNSKEY = RdataType.CDNSKEY +OPENPGPKEY = RdataType.OPENPGPKEY +CSYNC = RdataType.CSYNC +ZONEMD = RdataType.ZONEMD +SVCB = RdataType.SVCB +HTTPS = RdataType.HTTPS +SPF = RdataType.SPF +UNSPEC = RdataType.UNSPEC +NID = RdataType.NID +L32 = RdataType.L32 +L64 = RdataType.L64 +LP = RdataType.LP +EUI48 = RdataType.EUI48 +EUI64 = RdataType.EUI64 +TKEY = RdataType.TKEY +TSIG = RdataType.TSIG +IXFR = RdataType.IXFR +AXFR = RdataType.AXFR +MAILB = RdataType.MAILB +MAILA = RdataType.MAILA +ANY = RdataType.ANY +URI = RdataType.URI +CAA = RdataType.CAA +AVC = RdataType.AVC +AMTRELAY = RdataType.AMTRELAY +TA = RdataType.TA +DLV = RdataType.DLV + +### END generated RdataType constants diff --git 
a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/AFSDB.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/AFSDB.py new file mode 100644 index 0000000..d7838e7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/AFSDB.py @@ -0,0 +1,46 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.mxbase +import dns.immutable + + +@dns.immutable.immutable +class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX): + + """AFSDB record""" + + # Use the property mechanism to make "subtype" an alias for the + # "preference" attribute, and "hostname" an alias for the "exchange" + # attribute. + # + # This lets us inherit the UncompressedMX implementation but lets + # the caller use appropriate attribute names for the rdata type. + # + # We probably lose some performance vs. a cut-and-paste + # implementation, but this way we don't copy code, and that's + # good. + + @property + def subtype(self): + "the AFSDB subtype" + return self.preference + + @property + def hostname(self): + "the AFSDB hostname" + return self.exchange diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/AMTRELAY.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/AMTRELAY.py new file mode 100644 index 0000000..9f093de --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/AMTRELAY.py @@ -0,0 +1,86 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
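# --- editorial usage sketch (not part of the vendored dnspython source) -----
# The AFSDB class above inherits the whole MX implementation and only adds
# "subtype"/"hostname" as read-only aliases for "preference"/"exchange".
# A minimal hedged example via dnspython's public parsing entry point
# (record values are made up):
#
#     import dns.rdata
#     rd = dns.rdata.from_text('IN', 'AFSDB', '1 afsdb.example.com.')
#     rd.subtype   # -> 1, the same value as rd.preference
#     rd.hostname  # -> <name afsdb.example.com.>, the same as rd.exchange
# ----------------------------------------------------------------------------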
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdtypes.util + + +class Relay(dns.rdtypes.util.Gateway): + name = 'AMTRELAY relay' + + @property + def relay(self): + return self.gateway + + +@dns.immutable.immutable +class AMTRELAY(dns.rdata.Rdata): + + """AMTRELAY record""" + + # see: RFC 8777 + + __slots__ = ['precedence', 'discovery_optional', 'relay_type', 'relay'] + + def __init__(self, rdclass, rdtype, precedence, discovery_optional, + relay_type, relay): + super().__init__(rdclass, rdtype) + relay = Relay(relay_type, relay) + self.precedence = self._as_uint8(precedence) + self.discovery_optional = self._as_bool(discovery_optional) + self.relay_type = relay.type + self.relay = relay.relay + + def to_text(self, origin=None, relativize=True, **kw): + relay = Relay(self.relay_type, self.relay).to_text(origin, relativize) + return '%d %d %d %s' % (self.precedence, self.discovery_optional, + self.relay_type, relay) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + precedence = tok.get_uint8() + discovery_optional = tok.get_uint8() + if discovery_optional > 1: + raise dns.exception.SyntaxError('expecting 0 or 1') + discovery_optional = bool(discovery_optional) + relay_type = tok.get_uint8() + if relay_type > 0x7f: + raise dns.exception.SyntaxError('expecting an integer <= 127') + relay = Relay.from_text(relay_type, tok, origin, relativize, + relativize_to) + return cls(rdclass, rdtype, precedence, discovery_optional, relay_type, + relay.relay) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + relay_type = self.relay_type | (self.discovery_optional << 7) + header = struct.pack("!BB", self.precedence, relay_type) + file.write(header) + Relay(self.relay_type, self.relay).to_wire(file, compress, origin, + canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (precedence, relay_type) = parser.get_struct('!BB') + discovery_optional = bool(relay_type >> 7) + relay_type &= 0x7f + relay = Relay.from_wire_parser(relay_type, parser, origin) + return cls(rdclass, rdtype, precedence, discovery_optional, relay_type, + relay.relay) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/AVC.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/AVC.py new file mode 100644 index 0000000..11e026d --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/AVC.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
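# --- editorial usage sketch (not part of the vendored dnspython source) -----
# The AMTRELAY implementation above packs discovery_optional into the high
# bit of the relay-type octet (relay_type | (discovery_optional << 7)) and
# unpacks it the same way in from_wire_parser.  A hedged example, assuming
# relay type 1 denotes an IPv4 relay as in RFC 8777 (address is made up):
#
#     import dns.rdata
#     rd = dns.rdata.from_text('IN', 'AMTRELAY', '10 0 1 203.0.113.15')
#     rd.precedence, rd.discovery_optional, rd.relay_type, rd.relay
#     # -> (10, False, 1, '203.0.113.15')
# ----------------------------------------------------------------------------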
+ +import dns.rdtypes.txtbase +import dns.immutable + + +@dns.immutable.immutable +class AVC(dns.rdtypes.txtbase.TXTBase): + + """AVC record""" + + # See: IANA dns parameters for AVC diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CAA.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CAA.py new file mode 100644 index 0000000..c86b45e --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CAA.py @@ -0,0 +1,69 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class CAA(dns.rdata.Rdata): + + """CAA (Certification Authority Authorization) record""" + + # see: RFC 6844 + + __slots__ = ['flags', 'tag', 'value'] + + def __init__(self, rdclass, rdtype, flags, tag, value): + super().__init__(rdclass, rdtype) + self.flags = self._as_uint8(flags) + self.tag = self._as_bytes(tag, True, 255) + if not tag.isalnum(): + raise ValueError("tag is not alphanumeric") + self.value = self._as_bytes(value) + + def to_text(self, origin=None, relativize=True, **kw): + return '%u %s "%s"' % (self.flags, + dns.rdata._escapify(self.tag), + dns.rdata._escapify(self.value)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + flags = tok.get_uint8() + tag = tok.get_string().encode() + value = tok.get_string().encode() + return cls(rdclass, rdtype, flags, tag, value) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack('!B', self.flags)) + l = len(self.tag) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.tag) + file.write(self.value) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + flags = parser.get_uint8() + tag = parser.get_counted_bytes() + value = parser.get_remaining() + return cls(rdclass, rdtype, flags, tag, value) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CDNSKEY.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CDNSKEY.py new file mode 100644 index 0000000..14b1941 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CDNSKEY.py @@ -0,0 +1,28 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
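# --- editorial usage sketch (not part of the vendored dnspython source) -----
# For the CAA implementation above: flags is one octet, tag must be
# alphanumeric, and value is free-form bytes (RFC 6844).  A hedged example
# (the issuer name is made up):
#
#     import dns.rdata
#     rd = dns.rdata.from_text('IN', 'CAA', '0 issue "ca.example.net"')
#     rd.flags, rd.tag, rd.value  # -> (0, b'issue', b'ca.example.net')
#     rd.to_text()                # -> '0 issue "ca.example.net"'
# ----------------------------------------------------------------------------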
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.dnskeybase +import dns.immutable + +# pylint: disable=unused-import +from dns.rdtypes.dnskeybase import SEP, REVOKE, ZONE # noqa: F401 +# pylint: enable=unused-import + +@dns.immutable.immutable +class CDNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): + + """CDNSKEY record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CDS.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CDS.py new file mode 100644 index 0000000..094de12 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CDS.py @@ -0,0 +1,30 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.dsbase +import dns.immutable + + +@dns.immutable.immutable +class CDS(dns.rdtypes.dsbase.DSBase): + + """CDS record""" + + _digest_length_by_type = { + **dns.rdtypes.dsbase.DSBase._digest_length_by_type, + 0: 1, # delete, RFC 8078 Sec. 4 (including Errata ID 5049) + } diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CERT.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CERT.py new file mode 100644 index 0000000..f35ce3a --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CERT.py @@ -0,0 +1,113 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
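# --- editorial usage sketch (not part of the vendored dnspython source) -----
# The CDS class above extends the DS digest-length table with type 0 of
# length 1, which exists only so the RFC 8078 "delete" sentinel parses.
# A hedged example of that sentinel form (fields: key_tag algorithm
# digest_type digest):
#
#     import dns.rdata
#     rd = dns.rdata.from_text('IN', 'CDS', '0 0 0 00')
#     rd.to_text()  # -> '0 0 0 00', the child-side request to drop the DS
# ----------------------------------------------------------------------------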
+ +import struct +import base64 + +import dns.exception +import dns.immutable +import dns.dnssec +import dns.rdata +import dns.tokenizer + +_ctype_by_value = { + 1: 'PKIX', + 2: 'SPKI', + 3: 'PGP', + 4: 'IPKIX', + 5: 'ISPKI', + 6: 'IPGP', + 7: 'ACPKIX', + 8: 'IACPKIX', + 253: 'URI', + 254: 'OID', +} + +_ctype_by_name = { + 'PKIX': 1, + 'SPKI': 2, + 'PGP': 3, + 'IPKIX': 4, + 'ISPKI': 5, + 'IPGP': 6, + 'ACPKIX': 7, + 'IACPKIX': 8, + 'URI': 253, + 'OID': 254, +} + + +def _ctype_from_text(what): + v = _ctype_by_name.get(what) + if v is not None: + return v + return int(what) + + +def _ctype_to_text(what): + v = _ctype_by_value.get(what) + if v is not None: + return v + return str(what) + + +@dns.immutable.immutable +class CERT(dns.rdata.Rdata): + + """CERT record""" + + # see RFC 4398 + + __slots__ = ['certificate_type', 'key_tag', 'algorithm', 'certificate'] + + def __init__(self, rdclass, rdtype, certificate_type, key_tag, algorithm, + certificate): + super().__init__(rdclass, rdtype) + self.certificate_type = self._as_uint16(certificate_type) + self.key_tag = self._as_uint16(key_tag) + self.algorithm = self._as_uint8(algorithm) + self.certificate = self._as_bytes(certificate) + + def to_text(self, origin=None, relativize=True, **kw): + certificate_type = _ctype_to_text(self.certificate_type) + return "%s %d %s %s" % (certificate_type, self.key_tag, + dns.dnssec.algorithm_to_text(self.algorithm), + dns.rdata._base64ify(self.certificate, **kw)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + certificate_type = _ctype_from_text(tok.get_string()) + key_tag = tok.get_uint16() + algorithm = dns.dnssec.algorithm_from_text(tok.get_string()) + b64 = tok.concatenate_remaining_identifiers().encode() + certificate = base64.b64decode(b64) + return cls(rdclass, rdtype, certificate_type, key_tag, + algorithm, certificate) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + prefix = struct.pack("!HHB", self.certificate_type, self.key_tag, + self.algorithm) + file.write(prefix) + file.write(self.certificate) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (certificate_type, key_tag, algorithm) = parser.get_struct("!HHB") + certificate = parser.get_remaining() + return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, + certificate) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CNAME.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CNAME.py new file mode 100644 index 0000000..a4fcfa8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CNAME.py @@ -0,0 +1,29 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
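# --- editorial usage sketch (not part of the vendored dnspython source) -----
# _ctype_from_text/_ctype_to_text above accept either an RFC 4398 mnemonic
# or a bare integer, so the two spellings below should parse to equal rdatas.
# A hedged example (key tag and base64 certificate payload are made up):
#
#     import dns.rdata
#     a = dns.rdata.from_text('IN', 'CERT', 'PKIX 12345 RSASHA256 MTIzNA==')
#     b = dns.rdata.from_text('IN', 'CERT', '1 12345 RSASHA256 MTIzNA==')
#     a == b              # -> True; certificate type 1 and 'PKIX' are the same
# ----------------------------------------------------------------------------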
+ +import dns.rdtypes.nsbase +import dns.immutable + + +@dns.immutable.immutable +class CNAME(dns.rdtypes.nsbase.NSBase): + + """CNAME record + + Note: although CNAME is officially a singleton type, dnspython allows + non-singleton CNAME rdatasets because such sets have been commonly + used by BIND and other nameservers for load balancing.""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CSYNC.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CSYNC.py new file mode 100644 index 0000000..979028a --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/CSYNC.py @@ -0,0 +1,68 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011, 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype +import dns.name +import dns.rdtypes.util + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = 'CSYNC' + + +@dns.immutable.immutable +class CSYNC(dns.rdata.Rdata): + + """CSYNC record""" + + __slots__ = ['serial', 'flags', 'windows'] + + def __init__(self, rdclass, rdtype, serial, flags, windows): + super().__init__(rdclass, rdtype) + self.serial = self._as_uint32(serial) + self.flags = self._as_uint16(flags) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def to_text(self, origin=None, relativize=True, **kw): + text = Bitmap(self.windows).to_text() + return '%d %d%s' % (self.serial, self.flags, text) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + serial = tok.get_uint32() + flags = tok.get_uint16() + bitmap = Bitmap.from_text(tok) + return cls(rdclass, rdtype, serial, flags, bitmap) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack('!IH', self.serial, self.flags)) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (serial, flags) = parser.get_struct("!IH") + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, serial, flags, bitmap) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DLV.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DLV.py new file mode 100644 index 0000000..947dc42 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DLV.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.dsbase +import dns.immutable + + +@dns.immutable.immutable +class DLV(dns.rdtypes.dsbase.DSBase): + + """DLV record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DNAME.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DNAME.py new file mode 100644 index 0000000..f4984b5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DNAME.py @@ -0,0 +1,28 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.nsbase +import dns.immutable + + +@dns.immutable.immutable +class DNAME(dns.rdtypes.nsbase.UncompressedNS): + + """DNAME record""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, None, origin, canonicalize) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DNSKEY.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DNSKEY.py new file mode 100644 index 0000000..e69a7c1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DNSKEY.py @@ -0,0 +1,28 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
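# --- editorial usage sketch (not part of the vendored dnspython source) -----
# DNAME above overrides _to_wire to pass compress=None because the DNAME
# target must not be name-compressed on the wire (RFC 6672); DLV is simply a
# DS-shaped record.  A hedged example (the target name is made up):
#
#     import dns.rdata
#     rd = dns.rdata.from_text('IN', 'DNAME', 'target.example.net.')
#     rd.to_wire()  # -> b'\x06target\x07example\x03net\x00', never compressed
# ----------------------------------------------------------------------------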
+ +import dns.rdtypes.dnskeybase +import dns.immutable + +# pylint: disable=unused-import +from dns.rdtypes.dnskeybase import SEP, REVOKE, ZONE # noqa: F401 +# pylint: enable=unused-import + +@dns.immutable.immutable +class DNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): + + """DNSKEY record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DS.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DS.py new file mode 100644 index 0000000..3f6c3ee --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/DS.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.dsbase +import dns.immutable + + +@dns.immutable.immutable +class DS(dns.rdtypes.dsbase.DSBase): + + """DS record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/EUI48.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/EUI48.py new file mode 100644 index 0000000..0ab88ad --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/EUI48.py @@ -0,0 +1,31 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.euibase +import dns.immutable + + +@dns.immutable.immutable +class EUI48(dns.rdtypes.euibase.EUIBase): + + """EUI48 record""" + + # see: rfc7043.txt + + byte_len = 6 # 0123456789ab (in hex) + text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/EUI64.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/EUI64.py new file mode 100644 index 0000000..c42957e --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/EUI64.py @@ -0,0 +1,31 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2015 Red Hat, Inc. 
+# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.euibase +import dns.immutable + + +@dns.immutable.immutable +class EUI64(dns.rdtypes.euibase.EUIBase): + + """EUI64 record""" + + # see: rfc7043.txt + + byte_len = 8 # 0123456789abcdef (in hex) + text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab-cd-ef diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/GPOS.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/GPOS.py new file mode 100644 index 0000000..29fa8f8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/GPOS.py @@ -0,0 +1,128 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
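# --- editorial usage sketch (not part of the vendored dnspython source) -----
# The EUI48/EUI64 length arithmetic above: text_len = byte_len * 3 - 1, i.e.
# two hex digits plus one '-' separator per byte, minus the trailing
# separator (6 bytes -> 17 chars, 8 bytes -> 23 chars).  A hedged example:
#
#     import dns.rdata
#     rd = dns.rdata.from_text('IN', 'EUI64', '01-23-45-67-89-ab-cd-ef')
#     len(rd.to_text())  # -> 23 == 8 * 3 - 1
# ----------------------------------------------------------------------------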
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +def _validate_float_string(what): + if len(what) == 0: + raise dns.exception.FormError + if what[0] == b'-'[0] or what[0] == b'+'[0]: + what = what[1:] + if what.isdigit(): + return + try: + (left, right) = what.split(b'.') + except ValueError: + raise dns.exception.FormError + if left == b'' and right == b'': + raise dns.exception.FormError + if not left == b'' and not left.decode().isdigit(): + raise dns.exception.FormError + if not right == b'' and not right.decode().isdigit(): + raise dns.exception.FormError + + +@dns.immutable.immutable +class GPOS(dns.rdata.Rdata): + + """GPOS record""" + + # see: RFC 1712 + + __slots__ = ['latitude', 'longitude', 'altitude'] + + def __init__(self, rdclass, rdtype, latitude, longitude, altitude): + super().__init__(rdclass, rdtype) + if isinstance(latitude, float) or \ + isinstance(latitude, int): + latitude = str(latitude) + if isinstance(longitude, float) or \ + isinstance(longitude, int): + longitude = str(longitude) + if isinstance(altitude, float) or \ + isinstance(altitude, int): + altitude = str(altitude) + latitude = self._as_bytes(latitude, True, 255) + longitude = self._as_bytes(longitude, True, 255) + altitude = self._as_bytes(altitude, True, 255) + _validate_float_string(latitude) + _validate_float_string(longitude) + _validate_float_string(altitude) + self.latitude = latitude + self.longitude = longitude + self.altitude = altitude + flat = self.float_latitude + if flat < -90.0 or flat > 90.0: + raise dns.exception.FormError('bad latitude') + flong = self.float_longitude + if flong < -180.0 or flong > 180.0: + raise dns.exception.FormError('bad longitude') + + def to_text(self, origin=None, relativize=True, **kw): + return '{} {} {}'.format(self.latitude.decode(), + self.longitude.decode(), + self.altitude.decode()) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + latitude = tok.get_string() + longitude = tok.get_string() + altitude = tok.get_string() + return cls(rdclass, rdtype, latitude, longitude, altitude) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.latitude) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.latitude) + l = len(self.longitude) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.longitude) + l = len(self.altitude) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.altitude) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + latitude = parser.get_counted_bytes() + longitude = parser.get_counted_bytes() + altitude = parser.get_counted_bytes() + return cls(rdclass, rdtype, latitude, longitude, altitude) + + @property + def float_latitude(self): + "latitude as a floating point value" + return float(self.latitude) + + @property + def float_longitude(self): + "longitude as a floating point value" + return float(self.longitude) + + @property + def float_altitude(self): + "altitude as a floating point value" + return float(self.altitude) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/HINFO.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/HINFO.py new file mode 100644 index 0000000..cd04969 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/HINFO.py @@ -0,0 +1,65 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 
2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class HINFO(dns.rdata.Rdata): + + """HINFO record""" + + # see: RFC 1035 + + __slots__ = ['cpu', 'os'] + + def __init__(self, rdclass, rdtype, cpu, os): + super().__init__(rdclass, rdtype) + self.cpu = self._as_bytes(cpu, True, 255) + self.os = self._as_bytes(os, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + return '"{}" "{}"'.format(dns.rdata._escapify(self.cpu), + dns.rdata._escapify(self.os)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + cpu = tok.get_string(max_length=255) + os = tok.get_string(max_length=255) + return cls(rdclass, rdtype, cpu, os) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.cpu) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.cpu) + l = len(self.os) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.os) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + cpu = parser.get_counted_bytes() + os = parser.get_counted_bytes() + return cls(rdclass, rdtype, cpu, os) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/HIP.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/HIP.py new file mode 100644 index 0000000..e887359 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/HIP.py @@ -0,0 +1,85 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2010, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
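# --- editorial usage sketch (not part of the vendored dnspython source) -----
# HINFO above stores cpu and os as length-prefixed byte strings, each capped
# at 255 bytes by _as_bytes.  A hedged example using the classic
# RFC 1035-era placeholder values:
#
#     import dns.rdata
#     rd = dns.rdata.from_text('IN', 'HINFO', '"INTEL-386" "UNIX"')
#     rd.cpu, rd.os  # -> (b'INTEL-386', b'UNIX')
# ----------------------------------------------------------------------------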
+ +import struct +import base64 +import binascii + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class HIP(dns.rdata.Rdata): + + """HIP record""" + + # see: RFC 5205 + + __slots__ = ['hit', 'algorithm', 'key', 'servers'] + + def __init__(self, rdclass, rdtype, hit, algorithm, key, servers): + super().__init__(rdclass, rdtype) + self.hit = self._as_bytes(hit, True, 255) + self.algorithm = self._as_uint8(algorithm) + self.key = self._as_bytes(key, True) + self.servers = self._as_tuple(servers, self._as_name) + + def to_text(self, origin=None, relativize=True, **kw): + hit = binascii.hexlify(self.hit).decode() + key = base64.b64encode(self.key).replace(b'\n', b'').decode() + text = '' + servers = [] + for server in self.servers: + servers.append(server.choose_relativity(origin, relativize)) + if len(servers) > 0: + text += (' ' + ' '.join((x.to_unicode() for x in servers))) + return '%u %s %s%s' % (self.algorithm, hit, key, text) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + algorithm = tok.get_uint8() + hit = binascii.unhexlify(tok.get_string().encode()) + key = base64.b64decode(tok.get_string().encode()) + servers = [] + for token in tok.get_remaining(): + server = tok.as_name(token, origin, relativize, relativize_to) + servers.append(server) + return cls(rdclass, rdtype, hit, algorithm, key, servers) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + lh = len(self.hit) + lk = len(self.key) + file.write(struct.pack("!BBH", lh, self.algorithm, lk)) + file.write(self.hit) + file.write(self.key) + for server in self.servers: + server.to_wire(file, None, origin, False) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (lh, algorithm, lk) = parser.get_struct('!BBH') + hit = parser.get_bytes(lh) + key = parser.get_bytes(lk) + servers = [] + while parser.remaining() > 0: + server = parser.get_name(origin) + servers.append(server) + return cls(rdclass, rdtype, hit, algorithm, key, servers) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/ISDN.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/ISDN.py new file mode 100644 index 0000000..b9a49ad --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/ISDN.py @@ -0,0 +1,76 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
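# --- editorial usage sketch (not part of the vendored dnspython source) -----
# The HIP text form above is: algorithm, hex HIT, base64 public key, then
# zero or more rendezvous server names.  A hedged example (the HIT and key
# are made-up placeholders, not a real host identity):
#
#     import dns.rdata
#     rd = dns.rdata.from_text(
#         'IN', 'HIP',
#         '2 200100107b1a74df365639cc39f1d578 MTIzNA== rvs.example.com.')
#     rd.algorithm, len(rd.hit), len(rd.servers)  # -> (2, 16, 1)
# ----------------------------------------------------------------------------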
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class ISDN(dns.rdata.Rdata): + + """ISDN record""" + + # see: RFC 1183 + + __slots__ = ['address', 'subaddress'] + + def __init__(self, rdclass, rdtype, address, subaddress): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address, True, 255) + self.subaddress = self._as_bytes(subaddress, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + if self.subaddress: + return '"{}" "{}"'.format(dns.rdata._escapify(self.address), + dns.rdata._escapify(self.subaddress)) + else: + return '"%s"' % dns.rdata._escapify(self.address) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + address = tok.get_string() + tokens = tok.get_remaining(max_tokens=1) + if len(tokens) >= 1: + subaddress = tokens[0].unescape().value + else: + subaddress = '' + return cls(rdclass, rdtype, address, subaddress) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.address) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.address) + l = len(self.subaddress) + if l > 0: + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.subaddress) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_counted_bytes() + if parser.remaining() > 0: + subaddress = parser.get_counted_bytes() + else: + subaddress = b'' + return cls(rdclass, rdtype, address, subaddress) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/L32.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/L32.py new file mode 100644 index 0000000..47eff95 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/L32.py @@ -0,0 +1,40 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable + + +@dns.immutable.immutable +class L32(dns.rdata.Rdata): + + """L32 record""" + + # see: rfc6742.txt + + __slots__ = ['preference', 'locator32'] + + def __init__(self, rdclass, rdtype, preference, locator32): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.locator32 = self._as_ipv4_address(locator32) + + def to_text(self, origin=None, relativize=True, **kw): + return f'{self.preference} {self.locator32}' + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + preference = tok.get_uint16() + nodeid = tok.get_identifier() + return cls(rdclass, rdtype, preference, nodeid) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack('!H', self.preference)) + file.write(dns.ipv4.inet_aton(self.locator32)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + locator32 = parser.get_remaining() + return cls(rdclass, rdtype, preference, locator32) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/L64.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/L64.py new file mode 100644 index 0000000..aab36a8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/L64.py @@ -0,0 +1,48 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdtypes.util + + +@dns.immutable.immutable +class L64(dns.rdata.Rdata): + + """L64 record""" + + # see: 
rfc6742.txt + + __slots__ = ['preference', 'locator64'] + + def __init__(self, rdclass, rdtype, preference, locator64): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + if isinstance(locator64, bytes): + if len(locator64) != 8: + raise ValueError('invalid locator64') + self.locator64 = dns.rdata._hexify(locator64, 4, b':') + else: + dns.rdtypes.util.parse_formatted_hex(locator64, 4, 4, ':') + self.locator64 = locator64 + + def to_text(self, origin=None, relativize=True, **kw): + return f'{self.preference} {self.locator64}' + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + preference = tok.get_uint16() + locator64 = tok.get_identifier() + return cls(rdclass, rdtype, preference, locator64) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack('!H', self.preference)) + file.write(dns.rdtypes.util.parse_formatted_hex(self.locator64, + 4, 4, ':')) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + locator64 = parser.get_remaining() + return cls(rdclass, rdtype, preference, locator64) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/LOC.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/LOC.py new file mode 100644 index 0000000..c939899 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/LOC.py @@ -0,0 +1,326 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
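# --- editorial usage sketch (not part of the vendored dnspython source) -----
# The ILNP L64 record above keeps its locator as formatted hex text (four
# ':'-separated groups of four hex digits); parse_formatted_hex both
# validates that shape and packs the eight wire bytes.  A hedged example:
#
#     import dns.rdata
#     rd = dns.rdata.from_text('IN', 'L64', '10 2001:0db8:1140:1000')
#     rd.preference, rd.locator64  # -> (10, '2001:0db8:1140:1000')
# ----------------------------------------------------------------------------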
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata + + +_pows = tuple(10**i for i in range(0, 11)) + +# default values are in centimeters +_default_size = 100.0 +_default_hprec = 1000000.0 +_default_vprec = 1000.0 + +# for use by from_wire() +_MAX_LATITUDE = 0x80000000 + 90 * 3600000 +_MIN_LATITUDE = 0x80000000 - 90 * 3600000 +_MAX_LONGITUDE = 0x80000000 + 180 * 3600000 +_MIN_LONGITUDE = 0x80000000 - 180 * 3600000 + + +def _exponent_of(what, desc): + if what == 0: + return 0 + exp = None + for (i, pow) in enumerate(_pows): + if what < pow: + exp = i - 1 + break + if exp is None or exp < 0: + raise dns.exception.SyntaxError("%s value out of bounds" % desc) + return exp + + +def _float_to_tuple(what): + if what < 0: + sign = -1 + what *= -1 + else: + sign = 1 + what = round(what * 3600000) + degrees = int(what // 3600000) + what -= degrees * 3600000 + minutes = int(what // 60000) + what -= minutes * 60000 + seconds = int(what // 1000) + what -= int(seconds * 1000) + what = int(what) + return (degrees, minutes, seconds, what, sign) + + +def _tuple_to_float(what): + value = float(what[0]) + value += float(what[1]) / 60.0 + value += float(what[2]) / 3600.0 + value += float(what[3]) / 3600000.0 + return float(what[4]) * value + + +def _encode_size(what, desc): + what = int(what) + exponent = _exponent_of(what, desc) & 0xF + base = what // pow(10, exponent) & 0xF + return base * 16 + exponent + + +def _decode_size(what, desc): + exponent = what & 0x0F + if exponent > 9: + raise dns.exception.FormError("bad %s exponent" % desc) + base = (what & 0xF0) >> 4 + if base > 9: + raise dns.exception.FormError("bad %s base" % desc) + return base * pow(10, exponent) + + +def _check_coordinate_list(value, low, high): + if value[0] < low or value[0] > high: + raise ValueError(f'not in range [{low}, {high}]') + if value[1] < 0 or value[1] > 59: + raise ValueError('bad minutes value') + if value[2] < 0 or value[2] > 59: + raise ValueError('bad seconds value') + if value[3] < 0 or value[3] > 999: + raise ValueError('bad milliseconds value') + if value[4] != 1 and value[4] != -1: + raise ValueError('bad hemisphere value') + + +@dns.immutable.immutable +class LOC(dns.rdata.Rdata): + + """LOC record""" + + # see: RFC 1876 + + __slots__ = ['latitude', 'longitude', 'altitude', 'size', + 'horizontal_precision', 'vertical_precision'] + + def __init__(self, rdclass, rdtype, latitude, longitude, altitude, + size=_default_size, hprec=_default_hprec, + vprec=_default_vprec): + """Initialize a LOC record instance. + + The parameters I{latitude} and I{longitude} may be either a 4-tuple + of integers specifying (degrees, minutes, seconds, milliseconds), + or they may be floating point values specifying the number of + degrees. The other parameters are floats. 
Size, horizontal precision, + and vertical precision are specified in centimeters.""" + + super().__init__(rdclass, rdtype) + if isinstance(latitude, int): + latitude = float(latitude) + if isinstance(latitude, float): + latitude = _float_to_tuple(latitude) + _check_coordinate_list(latitude, -90, 90) + self.latitude = tuple(latitude) + if isinstance(longitude, int): + longitude = float(longitude) + if isinstance(longitude, float): + longitude = _float_to_tuple(longitude) + _check_coordinate_list(longitude, -180, 180) + self.longitude = tuple(longitude) + self.altitude = float(altitude) + self.size = float(size) + self.horizontal_precision = float(hprec) + self.vertical_precision = float(vprec) + + def to_text(self, origin=None, relativize=True, **kw): + if self.latitude[4] > 0: + lat_hemisphere = 'N' + else: + lat_hemisphere = 'S' + if self.longitude[4] > 0: + long_hemisphere = 'E' + else: + long_hemisphere = 'W' + text = "%d %d %d.%03d %s %d %d %d.%03d %s %0.2fm" % ( + self.latitude[0], self.latitude[1], + self.latitude[2], self.latitude[3], lat_hemisphere, + self.longitude[0], self.longitude[1], self.longitude[2], + self.longitude[3], long_hemisphere, + self.altitude / 100.0 + ) + + # do not print default values + if self.size != _default_size or \ + self.horizontal_precision != _default_hprec or \ + self.vertical_precision != _default_vprec: + text += " {:0.2f}m {:0.2f}m {:0.2f}m".format( + self.size / 100.0, self.horizontal_precision / 100.0, + self.vertical_precision / 100.0 + ) + return text + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + latitude = [0, 0, 0, 0, 1] + longitude = [0, 0, 0, 0, 1] + size = _default_size + hprec = _default_hprec + vprec = _default_vprec + + latitude[0] = tok.get_int() + t = tok.get_string() + if t.isdigit(): + latitude[1] = int(t) + t = tok.get_string() + if '.' in t: + (seconds, milliseconds) = t.split('.') + if not seconds.isdigit(): + raise dns.exception.SyntaxError( + 'bad latitude seconds value') + latitude[2] = int(seconds) + l = len(milliseconds) + if l == 0 or l > 3 or not milliseconds.isdigit(): + raise dns.exception.SyntaxError( + 'bad latitude milliseconds value') + if l == 1: + m = 100 + elif l == 2: + m = 10 + else: + m = 1 + latitude[3] = m * int(milliseconds) + t = tok.get_string() + elif t.isdigit(): + latitude[2] = int(t) + t = tok.get_string() + if t == 'S': + latitude[4] = -1 + elif t != 'N': + raise dns.exception.SyntaxError('bad latitude hemisphere value') + + longitude[0] = tok.get_int() + t = tok.get_string() + if t.isdigit(): + longitude[1] = int(t) + t = tok.get_string() + if '.' 
in t: + (seconds, milliseconds) = t.split('.') + if not seconds.isdigit(): + raise dns.exception.SyntaxError( + 'bad longitude seconds value') + longitude[2] = int(seconds) + l = len(milliseconds) + if l == 0 or l > 3 or not milliseconds.isdigit(): + raise dns.exception.SyntaxError( + 'bad longitude milliseconds value') + if l == 1: + m = 100 + elif l == 2: + m = 10 + else: + m = 1 + longitude[3] = m * int(milliseconds) + t = tok.get_string() + elif t.isdigit(): + longitude[2] = int(t) + t = tok.get_string() + if t == 'W': + longitude[4] = -1 + elif t != 'E': + raise dns.exception.SyntaxError('bad longitude hemisphere value') + + t = tok.get_string() + if t[-1] == 'm': + t = t[0: -1] + altitude = float(t) * 100.0 # m -> cm + + tokens = tok.get_remaining(max_tokens=3) + if len(tokens) >= 1: + value = tokens[0].unescape().value + if value[-1] == 'm': + value = value[0: -1] + size = float(value) * 100.0 # m -> cm + if len(tokens) >= 2: + value = tokens[1].unescape().value + if value[-1] == 'm': + value = value[0: -1] + hprec = float(value) * 100.0 # m -> cm + if len(tokens) >= 3: + value = tokens[2].unescape().value + if value[-1] == 'm': + value = value[0: -1] + vprec = float(value) * 100.0 # m -> cm + + # Try encoding these now so we raise if they are bad + _encode_size(size, "size") + _encode_size(hprec, "horizontal precision") + _encode_size(vprec, "vertical precision") + + return cls(rdclass, rdtype, latitude, longitude, altitude, + size, hprec, vprec) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + milliseconds = (self.latitude[0] * 3600000 + + self.latitude[1] * 60000 + + self.latitude[2] * 1000 + + self.latitude[3]) * self.latitude[4] + latitude = 0x80000000 + milliseconds + milliseconds = (self.longitude[0] * 3600000 + + self.longitude[1] * 60000 + + self.longitude[2] * 1000 + + self.longitude[3]) * self.longitude[4] + longitude = 0x80000000 + milliseconds + altitude = int(self.altitude) + 10000000 + size = _encode_size(self.size, "size") + hprec = _encode_size(self.horizontal_precision, "horizontal precision") + vprec = _encode_size(self.vertical_precision, "vertical precision") + wire = struct.pack("!BBBBIII", 0, size, hprec, vprec, latitude, + longitude, altitude) + file.write(wire) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (version, size, hprec, vprec, latitude, longitude, altitude) = \ + parser.get_struct("!BBBBIII") + if version != 0: + raise dns.exception.FormError("LOC version not zero") + if latitude < _MIN_LATITUDE or latitude > _MAX_LATITUDE: + raise dns.exception.FormError("bad latitude") + if latitude > 0x80000000: + latitude = (latitude - 0x80000000) / 3600000 + else: + latitude = -1 * (0x80000000 - latitude) / 3600000 + if longitude < _MIN_LONGITUDE or longitude > _MAX_LONGITUDE: + raise dns.exception.FormError("bad longitude") + if longitude > 0x80000000: + longitude = (longitude - 0x80000000) / 3600000 + else: + longitude = -1 * (0x80000000 - longitude) / 3600000 + altitude = float(altitude) - 10000000.0 + size = _decode_size(size, "size") + hprec = _decode_size(hprec, "horizontal precision") + vprec = _decode_size(vprec, "vertical precision") + return cls(rdclass, rdtype, latitude, longitude, altitude, + size, hprec, vprec) + + @property + def float_latitude(self): + "latitude as a floating point value" + return _tuple_to_float(self.latitude) + + @property + def float_longitude(self): + "longitude as a floating point value" + return _tuple_to_float(self.longitude) diff --git 
a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/LP.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/LP.py new file mode 100644 index 0000000..b6a2e36 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/LP.py @@ -0,0 +1,41 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable + + +@dns.immutable.immutable +class LP(dns.rdata.Rdata): + + """LP record""" + + # see: rfc6742.txt + + __slots__ = ['preference', 'fqdn'] + + def __init__(self, rdclass, rdtype, preference, fqdn): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.fqdn = self._as_name(fqdn) + + def to_text(self, origin=None, relativize=True, **kw): + fqdn = self.fqdn.choose_relativity(origin, relativize) + return '%d %s' % (self.preference, fqdn) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + preference = tok.get_uint16() + fqdn = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, fqdn) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack('!H', self.preference)) + self.fqdn.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + fqdn = parser.get_name(origin) + return cls(rdclass, rdtype, preference, fqdn) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/MX.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/MX.py new file mode 100644 index 0000000..a697ea4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/MX.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
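# --- editorial usage sketch (not part of the vendored dnspython source) -----
# LP above pairs a 16-bit preference with an FQDN, much like MX but pointing
# at an ILNP locator name (RFC 6742).  A hedged example (name is made up):
#
#     import dns.rdata
#     rd = dns.rdata.from_text('IN', 'LP', '10 l64-subnet1.example.com.')
#     rd.to_text()  # -> '10 l64-subnet1.example.com.'
# ----------------------------------------------------------------------------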
+ +import dns.rdtypes.mxbase +import dns.immutable + + +@dns.immutable.immutable +class MX(dns.rdtypes.mxbase.MXBase): + + """MX record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NID.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NID.py new file mode 100644 index 0000000..74951bb --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NID.py @@ -0,0 +1,47 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdtypes.util + + +@dns.immutable.immutable +class NID(dns.rdata.Rdata): + + """NID record""" + + # see: rfc6742.txt + + __slots__ = ['preference', 'nodeid'] + + def __init__(self, rdclass, rdtype, preference, nodeid): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + if isinstance(nodeid, bytes): + if len(nodeid) != 8: + raise ValueError('invalid nodeid') + self.nodeid = dns.rdata._hexify(nodeid, 4, b':') + else: + dns.rdtypes.util.parse_formatted_hex(nodeid, 4, 4, ':') + self.nodeid = nodeid + + def to_text(self, origin=None, relativize=True, **kw): + return f'{self.preference} {self.nodeid}' + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + preference = tok.get_uint16() + nodeid = tok.get_identifier() + return cls(rdclass, rdtype, preference, nodeid) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack('!H', self.preference)) + file.write(dns.rdtypes.util.parse_formatted_hex(self.nodeid, 4, 4, ':')) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + nodeid = parser.get_remaining() + return cls(rdclass, rdtype, preference, nodeid) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NINFO.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NINFO.py new file mode 100644 index 0000000..d53e967 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NINFO.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.txtbase +import dns.immutable + + +@dns.immutable.immutable +class NINFO(dns.rdtypes.txtbase.TXTBase): + + """NINFO record""" + + # see: draft-reid-dnsext-zs-01 diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NS.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NS.py new file mode 100644 index 0000000..a0cc232 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NS.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.nsbase +import dns.immutable + + +@dns.immutable.immutable +class NS(dns.rdtypes.nsbase.NSBase): + + """NS record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NSEC.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NSEC.py new file mode 100644 index 0000000..dc31f4c --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NSEC.py @@ -0,0 +1,67 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype +import dns.name +import dns.rdtypes.util + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = 'NSEC' + + +@dns.immutable.immutable +class NSEC(dns.rdata.Rdata): + + """NSEC record""" + + __slots__ = ['next', 'windows'] + + def __init__(self, rdclass, rdtype, next, windows): + super().__init__(rdclass, rdtype) + self.next = self._as_name(next) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def to_text(self, origin=None, relativize=True, **kw): + next = self.next.choose_relativity(origin, relativize) + text = Bitmap(self.windows).to_text() + return '{}{}'.format(next, text) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + next = tok.get_name(origin, relativize, relativize_to) + windows = Bitmap.from_text(tok) + return cls(rdclass, rdtype, next, windows) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + # Note that NSEC downcasing, originally mandated by RFC 4034 + # section 6.2 was removed by RFC 6840 section 5.1. 
+ self.next.to_wire(file, None, origin, False) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + next = parser.get_name(origin) + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, next, bitmap) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NSEC3.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NSEC3.py new file mode 100644 index 0000000..14242bd --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NSEC3.py @@ -0,0 +1,111 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import binascii +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype +import dns.rdtypes.util + + +b32_hex_to_normal = bytes.maketrans(b'0123456789ABCDEFGHIJKLMNOPQRSTUV', + b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567') +b32_normal_to_hex = bytes.maketrans(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567', + b'0123456789ABCDEFGHIJKLMNOPQRSTUV') + +# hash algorithm constants +SHA1 = 1 + +# flag constants +OPTOUT = 1 + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = 'NSEC3' + + +@dns.immutable.immutable +class NSEC3(dns.rdata.Rdata): + + """NSEC3 record""" + + __slots__ = ['algorithm', 'flags', 'iterations', 'salt', 'next', 'windows'] + + def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt, + next, windows): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.flags = self._as_uint8(flags) + self.iterations = self._as_uint16(iterations) + self.salt = self._as_bytes(salt, True, 255) + self.next = self._as_bytes(next, True, 255) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def to_text(self, origin=None, relativize=True, **kw): + next = base64.b32encode(self.next).translate( + b32_normal_to_hex).lower().decode() + if self.salt == b'': + salt = '-' + else: + salt = binascii.hexlify(self.salt).decode() + text = Bitmap(self.windows).to_text() + return '%u %u %u %s %s%s' % (self.algorithm, self.flags, + self.iterations, salt, next, text) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + algorithm = tok.get_uint8() + flags = tok.get_uint8() + iterations = tok.get_uint16() + salt = tok.get_string() + if salt == '-': + salt = b'' + else: + salt = binascii.unhexlify(salt.encode('ascii')) + next = tok.get_string().encode( + 'ascii').upper().translate(b32_hex_to_normal) + next = base64.b32decode(next) + bitmap = Bitmap.from_text(tok) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, + bitmap) + + def _to_wire(self, file, 
compress=None, origin=None, canonicalize=False): + l = len(self.salt) + file.write(struct.pack("!BBHB", self.algorithm, self.flags, + self.iterations, l)) + file.write(self.salt) + l = len(self.next) + file.write(struct.pack("!B", l)) + file.write(self.next) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (algorithm, flags, iterations) = parser.get_struct('!BBH') + salt = parser.get_counted_bytes() + next = parser.get_counted_bytes() + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, + bitmap) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py new file mode 100644 index 0000000..299bf6e --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py @@ -0,0 +1,71 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
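The NSEC3 from_text above reads algorithm, flags, iterations, salt, the base32hex-encoded next hashed owner name, and finally the type bitmap. A sketch using the well-known example values from RFC 5155 (dnspython 2.x assumed):

import dns.rdata

# The salt is unhexlified, and the next-hashed-owner field is decoded from
# base32hex via the translation tables defined at the top of NSEC3.py.
r = dns.rdata.from_text(
    'IN', 'NSEC3',
    '1 1 12 aabbccdd 2t7b4g4vsa5smi47k61mv5bv1a22bojr NS SOA MX RRSIG DNSKEY NSEC3PARAM')
assert r.algorithm == 1                     # SHA1
assert r.flags == 1                         # OPTOUT
assert r.salt == bytes.fromhex('aabbccdd')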
+ +import struct +import binascii + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class NSEC3PARAM(dns.rdata.Rdata): + + """NSEC3PARAM record""" + + __slots__ = ['algorithm', 'flags', 'iterations', 'salt'] + + def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.flags = self._as_uint8(flags) + self.iterations = self._as_uint16(iterations) + self.salt = self._as_bytes(salt, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + if self.salt == b'': + salt = '-' + else: + salt = binascii.hexlify(self.salt).decode() + return '%u %u %u %s' % (self.algorithm, self.flags, self.iterations, + salt) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + algorithm = tok.get_uint8() + flags = tok.get_uint8() + iterations = tok.get_uint16() + salt = tok.get_string() + if salt == '-': + salt = '' + else: + salt = binascii.unhexlify(salt.encode()) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.salt) + file.write(struct.pack("!BBHB", self.algorithm, self.flags, + self.iterations, l)) + file.write(self.salt) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (algorithm, flags, iterations) = parser.get_struct('!BBH') + salt = parser.get_counted_bytes() + return cls(rdclass, rdtype, algorithm, flags, iterations, salt) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py new file mode 100644 index 0000000..dcfa028 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py @@ -0,0 +1,52 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
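As the NSEC3PARAM code above shows, a '-' salt in presentation format denotes an empty salt. A minimal sketch (dnspython 2.x assumed):

import dns.rdata

# '-' round-trips through b'' and back to '-' in to_text().
p = dns.rdata.from_text('IN', 'NSEC3PARAM', '1 0 0 -')
assert p.salt == b''
print(p.to_text())  # -> 1 0 0 -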
+ +import base64 + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + +@dns.immutable.immutable +class OPENPGPKEY(dns.rdata.Rdata): + + """OPENPGPKEY record""" + + # see: RFC 7929 + + def __init__(self, rdclass, rdtype, key): + super().__init__(rdclass, rdtype) + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._base64ify(self.key, chunksize=None, **kw) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls(rdclass, rdtype, key) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + key = parser.get_remaining() + return cls(rdclass, rdtype, key) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/OPT.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/OPT.py new file mode 100644 index 0000000..69b8fe7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/OPT.py @@ -0,0 +1,76 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.edns +import dns.immutable +import dns.exception +import dns.rdata + + +# We don't implement from_text, and that's ok. +# pylint: disable=abstract-method + +@dns.immutable.immutable +class OPT(dns.rdata.Rdata): + + """OPT record""" + + __slots__ = ['options'] + + def __init__(self, rdclass, rdtype, options): + """Initialize an OPT rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata, + which is also the payload size. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. 
+ + *options*, a tuple of ``bytes`` + """ + + super().__init__(rdclass, rdtype) + def as_option(option): + if not isinstance(option, dns.edns.Option): + raise ValueError('option is not a dns.edns.option') + return option + self.options = self._as_tuple(options, as_option) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for opt in self.options: + owire = opt.to_wire() + file.write(struct.pack("!HH", opt.otype, len(owire))) + file.write(owire) + + def to_text(self, origin=None, relativize=True, **kw): + return ' '.join(opt.to_text() for opt in self.options) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + options = [] + while parser.remaining() > 0: + (otype, olen) = parser.get_struct('!HH') + with parser.restrict_to(olen): + opt = dns.edns.option_from_wire_parser(otype, parser) + options.append(opt) + return cls(rdclass, rdtype, options) + + @property + def payload(self): + "payload size" + return self.rdclass diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/PTR.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/PTR.py new file mode 100644 index 0000000..265bed0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/PTR.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.nsbase +import dns.immutable + + +@dns.immutable.immutable +class PTR(dns.rdtypes.nsbase.NSBase): + + """PTR record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/RP.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/RP.py new file mode 100644 index 0000000..a4e2297 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/RP.py @@ -0,0 +1,58 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
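OPT is unusual in that the rdataclass field doubles as the advertised EDNS payload size (hence the payload property above), so OPT records are normally created via dns.message rather than by hand. A sketch, assuming dnspython 2.x:

import dns.message
import dns.rdatatype

# use_edns attaches an OPT record to the query; the requested UDP payload
# size travels in the OPT record's CLASS field.
q = dns.message.make_query('example.com.', dns.rdatatype.A,
                           use_edns=0, payload=1232)
print(q.payload)  # -> 1232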
+ +import dns.exception +import dns.immutable +import dns.rdata +import dns.name + + +@dns.immutable.immutable +class RP(dns.rdata.Rdata): + + """RP record""" + + # see: RFC 1183 + + __slots__ = ['mbox', 'txt'] + + def __init__(self, rdclass, rdtype, mbox, txt): + super().__init__(rdclass, rdtype) + self.mbox = self._as_name(mbox) + self.txt = self._as_name(txt) + + def to_text(self, origin=None, relativize=True, **kw): + mbox = self.mbox.choose_relativity(origin, relativize) + txt = self.txt.choose_relativity(origin, relativize) + return "{} {}".format(str(mbox), str(txt)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + mbox = tok.get_name(origin, relativize, relativize_to) + txt = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, mbox, txt) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.mbox.to_wire(file, None, origin, canonicalize) + self.txt.to_wire(file, None, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + mbox = parser.get_name(origin) + txt = parser.get_name(origin) + return cls(rdclass, rdtype, mbox, txt) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/RRSIG.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/RRSIG.py new file mode 100644 index 0000000..d050ccc --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/RRSIG.py @@ -0,0 +1,124 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
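A quick RP example matching the from_text above (names illustrative; dnspython 2.x assumed):

import dns.rdata

# RP (RFC 1183): a responsible-person mailbox plus a name where related
# TXT records can be found.
rp = dns.rdata.from_text('IN', 'RP', 'hostmaster.example. contact-info.example.')
print(rp.mbox, rp.txt)  # -> hostmaster.example. contact-info.example.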
+ +import base64 +import calendar +import struct +import time + +import dns.dnssec +import dns.immutable +import dns.exception +import dns.rdata +import dns.rdatatype + + +class BadSigTime(dns.exception.DNSException): + + """Time in DNS SIG or RRSIG resource record cannot be parsed.""" + + +def sigtime_to_posixtime(what): + if len(what) <= 10 and what.isdigit(): + return int(what) + if len(what) != 14: + raise BadSigTime + year = int(what[0:4]) + month = int(what[4:6]) + day = int(what[6:8]) + hour = int(what[8:10]) + minute = int(what[10:12]) + second = int(what[12:14]) + return calendar.timegm((year, month, day, hour, minute, second, + 0, 0, 0)) + + +def posixtime_to_sigtime(what): + return time.strftime('%Y%m%d%H%M%S', time.gmtime(what)) + + +@dns.immutable.immutable +class RRSIG(dns.rdata.Rdata): + + """RRSIG record""" + + __slots__ = ['type_covered', 'algorithm', 'labels', 'original_ttl', + 'expiration', 'inception', 'key_tag', 'signer', + 'signature'] + + def __init__(self, rdclass, rdtype, type_covered, algorithm, labels, + original_ttl, expiration, inception, key_tag, signer, + signature): + super().__init__(rdclass, rdtype) + self.type_covered = self._as_rdatatype(type_covered) + self.algorithm = dns.dnssec.Algorithm.make(algorithm) + self.labels = self._as_uint8(labels) + self.original_ttl = self._as_ttl(original_ttl) + self.expiration = self._as_uint32(expiration) + self.inception = self._as_uint32(inception) + self.key_tag = self._as_uint16(key_tag) + self.signer = self._as_name(signer) + self.signature = self._as_bytes(signature) + + def covers(self): + return self.type_covered + + def to_text(self, origin=None, relativize=True, **kw): + return '%s %d %d %d %s %s %d %s %s' % ( + dns.rdatatype.to_text(self.type_covered), + self.algorithm, + self.labels, + self.original_ttl, + posixtime_to_sigtime(self.expiration), + posixtime_to_sigtime(self.inception), + self.key_tag, + self.signer.choose_relativity(origin, relativize), + dns.rdata._base64ify(self.signature, **kw) + ) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + type_covered = dns.rdatatype.from_text(tok.get_string()) + algorithm = dns.dnssec.algorithm_from_text(tok.get_string()) + labels = tok.get_int() + original_ttl = tok.get_ttl() + expiration = sigtime_to_posixtime(tok.get_string()) + inception = sigtime_to_posixtime(tok.get_string()) + key_tag = tok.get_int() + signer = tok.get_name(origin, relativize, relativize_to) + b64 = tok.concatenate_remaining_identifiers().encode() + signature = base64.b64decode(b64) + return cls(rdclass, rdtype, type_covered, algorithm, labels, + original_ttl, expiration, inception, key_tag, signer, + signature) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack('!HBBIIIH', self.type_covered, + self.algorithm, self.labels, + self.original_ttl, self.expiration, + self.inception, self.key_tag) + file.write(header) + self.signer.to_wire(file, None, origin, canonicalize) + file.write(self.signature) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct('!HBBIIIH') + signer = parser.get_name(origin) + signature = parser.get_remaining() + return cls(rdclass, rdtype, *header, signer, signature) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/RT.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/RT.py new file mode 100644 index 0000000..8d9c6bd --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/RT.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.mxbase +import dns.immutable + + +@dns.immutable.immutable +class RT(dns.rdtypes.mxbase.UncompressedDowncasingMX): + + """RT record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SMIMEA.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SMIMEA.py new file mode 100644 index 0000000..55d87bf --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SMIMEA.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.tlsabase + + +@dns.immutable.immutable +class SMIMEA(dns.rdtypes.tlsabase.TLSABase): + """SMIMEA record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SOA.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SOA.py new file mode 100644 index 0000000..7ce8865 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SOA.py @@ -0,0 +1,78 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
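The sigtime helpers in RRSIG.py above accept both the 14-digit YYYYMMDDHHMMSS form and a plain epoch count of up to ten digits. A sketch importing the module-level helpers directly (dnspython 2.x assumed):

from dns.rdtypes.ANY.RRSIG import posixtime_to_sigtime, sigtime_to_posixtime

t = sigtime_to_posixtime('20230325024503')            # UTC calendar form
assert posixtime_to_sigtime(t) == '20230325024503'    # exact round trip
assert sigtime_to_posixtime('1679712303') == 1679712303  # <= 10 digits: epoch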
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.name + + +@dns.immutable.immutable +class SOA(dns.rdata.Rdata): + + """SOA record""" + + # see: RFC 1035 + + __slots__ = ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire', + 'minimum'] + + def __init__(self, rdclass, rdtype, mname, rname, serial, refresh, retry, + expire, minimum): + super().__init__(rdclass, rdtype) + self.mname = self._as_name(mname) + self.rname = self._as_name(rname) + self.serial = self._as_uint32(serial) + self.refresh = self._as_ttl(refresh) + self.retry = self._as_ttl(retry) + self.expire = self._as_ttl(expire) + self.minimum = self._as_ttl(minimum) + + def to_text(self, origin=None, relativize=True, **kw): + mname = self.mname.choose_relativity(origin, relativize) + rname = self.rname.choose_relativity(origin, relativize) + return '%s %s %d %d %d %d %d' % ( + mname, rname, self.serial, self.refresh, self.retry, + self.expire, self.minimum) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + mname = tok.get_name(origin, relativize, relativize_to) + rname = tok.get_name(origin, relativize, relativize_to) + serial = tok.get_uint32() + refresh = tok.get_ttl() + retry = tok.get_ttl() + expire = tok.get_ttl() + minimum = tok.get_ttl() + return cls(rdclass, rdtype, mname, rname, serial, refresh, retry, + expire, minimum) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.mname.to_wire(file, compress, origin, canonicalize) + self.rname.to_wire(file, compress, origin, canonicalize) + five_ints = struct.pack('!IIIII', self.serial, self.refresh, + self.retry, self.expire, self.minimum) + file.write(five_ints) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + mname = parser.get_name(origin) + rname = parser.get_name(origin) + return cls(rdclass, rdtype, mname, rname, *parser.get_struct('!IIIII')) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SPF.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SPF.py new file mode 100644 index 0000000..1190e0d --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SPF.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
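SOA field order in the text form handled above is mname, rname, serial, refresh, retry, expire, minimum. A sketch with illustrative values (dnspython 2.x assumed):

import dns.rdata

soa = dns.rdata.from_text(
    'IN', 'SOA',
    'ns1.example. hostmaster.example. 2023032501 7200 900 1209600 86400')
assert soa.serial == 2023032501
assert soa.minimum == 86400  # also used as the negative-caching TTL (RFC 2308)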
+ +import dns.rdtypes.txtbase +import dns.immutable + + +@dns.immutable.immutable +class SPF(dns.rdtypes.txtbase.TXTBase): + + """SPF record""" + + # see: RFC 4408 diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SSHFP.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SSHFP.py new file mode 100644 index 0000000..cc03519 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/SSHFP.py @@ -0,0 +1,69 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct +import binascii + +import dns.rdata +import dns.immutable +import dns.rdatatype + + +@dns.immutable.immutable +class SSHFP(dns.rdata.Rdata): + + """SSHFP record""" + + # See RFC 4255 + + __slots__ = ['algorithm', 'fp_type', 'fingerprint'] + + def __init__(self, rdclass, rdtype, algorithm, fp_type, + fingerprint): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.fp_type = self._as_uint8(fp_type) + self.fingerprint = self._as_bytes(fingerprint, True) + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop('chunksize', 128) + return '%d %d %s' % (self.algorithm, + self.fp_type, + dns.rdata._hexify(self.fingerprint, + chunksize=chunksize, + **kw)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + algorithm = tok.get_uint8() + fp_type = tok.get_uint8() + fingerprint = tok.concatenate_remaining_identifiers().encode() + fingerprint = binascii.unhexlify(fingerprint) + return cls(rdclass, rdtype, algorithm, fp_type, fingerprint) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BB", self.algorithm, self.fp_type) + file.write(header) + file.write(self.fingerprint) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("BB") + fingerprint = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], fingerprint) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TKEY.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TKEY.py new file mode 100644 index 0000000..861fc4e --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TKEY.py @@ -0,0 +1,118 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
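SSHFP's from_text above hex-decodes everything after the two numeric fields. A sketch with a fabricated key blob (dnspython 2.x assumed; 4 = Ed25519 and 2 = SHA-256 in the SSHFP registries):

import hashlib
import dns.rdata

digest = hashlib.sha256(b'not a real host key').hexdigest()
r = dns.rdata.from_text('IN', 'SSHFP', '4 2 ' + digest)
assert r.fingerprint == bytes.fromhex(digest)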
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import struct + +import dns.dnssec +import dns.immutable +import dns.exception +import dns.rdata + + +@dns.immutable.immutable +class TKEY(dns.rdata.Rdata): + + """TKEY Record""" + + __slots__ = ['algorithm', 'inception', 'expiration', 'mode', 'error', + 'key', 'other'] + + def __init__(self, rdclass, rdtype, algorithm, inception, expiration, + mode, error, key, other=b''): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_name(algorithm) + self.inception = self._as_uint32(inception) + self.expiration = self._as_uint32(expiration) + self.mode = self._as_uint16(mode) + self.error = self._as_uint16(error) + self.key = self._as_bytes(key) + self.other = self._as_bytes(other) + + def to_text(self, origin=None, relativize=True, **kw): + _algorithm = self.algorithm.choose_relativity(origin, relativize) + text = '%s %u %u %u %u %s' % (str(_algorithm), self.inception, + self.expiration, self.mode, self.error, + dns.rdata._base64ify(self.key, 0)) + if len(self.other) > 0: + text += ' %s' % (dns.rdata._base64ify(self.other, 0)) + + return text + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + algorithm = tok.get_name(relativize=False) + inception = tok.get_uint32() + expiration = tok.get_uint32() + mode = tok.get_uint16() + error = tok.get_uint16() + key_b64 = tok.get_string().encode() + key = base64.b64decode(key_b64) + other_b64 = tok.concatenate_remaining_identifiers(True).encode() + other = base64.b64decode(other_b64) + + return cls(rdclass, rdtype, algorithm, inception, expiration, mode, + error, key, other) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.algorithm.to_wire(file, compress, origin) + file.write(struct.pack("!IIHH", self.inception, self.expiration, + self.mode, self.error)) + file.write(struct.pack("!H", len(self.key))) + file.write(self.key) + file.write(struct.pack("!H", len(self.other))) + if len(self.other) > 0: + file.write(self.other) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + algorithm = parser.get_name(origin) + inception, expiration, mode, error = parser.get_struct("!IIHH") + key = parser.get_counted_bytes(2) + other = parser.get_counted_bytes(2) + + return cls(rdclass, rdtype, algorithm, inception, expiration, mode, + error, key, other) + + # Constants for the mode field - from RFC 2930: + # 2.5 The Mode Field + # + # The mode field specifies the general scheme for key agreement or + # the purpose of the TKEY DNS message. Servers and resolvers + # supporting this specification MUST implement the Diffie-Hellman key + # agreement mode and the key deletion mode for queries. All other + # modes are OPTIONAL. A server supporting TKEY that receives a TKEY + # request with a mode it does not support returns the BADMODE error. 
+ # The following values of the Mode octet are defined, available, or + # reserved: + # + # Value Description + # ----- ----------- + # 0 - reserved, see section 7 + # 1 server assignment + # 2 Diffie-Hellman exchange + # 3 GSS-API negotiation + # 4 resolver assignment + # 5 key deletion + # 6-65534 - available, see section 7 + # 65535 - reserved, see section 7 + SERVER_ASSIGNMENT = 1 + DIFFIE_HELLMAN_EXCHANGE = 2 + GSSAPI_NEGOTIATION = 3 + RESOLVER_ASSIGNMENT = 4 + KEY_DELETION = 5 diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TLSA.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TLSA.py new file mode 100644 index 0000000..c9ba199 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TLSA.py @@ -0,0 +1,10 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.tlsabase + + +@dns.immutable.immutable +class TLSA(dns.rdtypes.tlsabase.TLSABase): + + """TLSA record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TSIG.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TSIG.py new file mode 100644 index 0000000..b43a78f --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TSIG.py @@ -0,0 +1,120 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import struct + +import dns.exception +import dns.immutable +import dns.rcode +import dns.rdata + + +@dns.immutable.immutable +class TSIG(dns.rdata.Rdata): + + """TSIG record""" + + __slots__ = ['algorithm', 'time_signed', 'fudge', 'mac', + 'original_id', 'error', 'other'] + + def __init__(self, rdclass, rdtype, algorithm, time_signed, fudge, mac, + original_id, error, other): + """Initialize a TSIG rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. + + *algorithm*, a ``dns.name.Name``. + + *time_signed*, an ``int``. + + *fudge*, an ``int``.
+ + *mac*, a ``bytes`` + + *original_id*, an ``int`` + + *error*, an ``int`` + + *other*, a ``bytes`` + """ + + super().__init__(rdclass, rdtype) + self.algorithm = self._as_name(algorithm) + self.time_signed = self._as_uint48(time_signed) + self.fudge = self._as_uint16(fudge) + self.mac = self._as_bytes(mac) + self.original_id = self._as_uint16(original_id) + self.error = dns.rcode.Rcode.make(error) + self.other = self._as_bytes(other) + + def to_text(self, origin=None, relativize=True, **kw): + algorithm = self.algorithm.choose_relativity(origin, relativize) + error = dns.rcode.to_text(self.error, True) + text = f"{algorithm} {self.time_signed} {self.fudge} " + \ + f"{len(self.mac)} {dns.rdata._base64ify(self.mac, 0)} " + \ + f"{self.original_id} {error} {len(self.other)}" + if self.other: + text += f" {dns.rdata._base64ify(self.other, 0)}" + return text + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + algorithm = tok.get_name(relativize=False) + time_signed = tok.get_uint48() + fudge = tok.get_uint16() + mac_len = tok.get_uint16() + mac = base64.b64decode(tok.get_string()) + if len(mac) != mac_len: + raise SyntaxError('invalid MAC') + original_id = tok.get_uint16() + error = dns.rcode.from_text(tok.get_string()) + other_len = tok.get_uint16() + if other_len > 0: + other = base64.b64decode(tok.get_string()) + if len(other) != other_len: + raise SyntaxError('invalid other data') + else: + other = b'' + return cls(rdclass, rdtype, algorithm, time_signed, fudge, mac, + original_id, error, other) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.algorithm.to_wire(file, None, origin, False) + file.write(struct.pack('!HIHH', + (self.time_signed >> 32) & 0xffff, + self.time_signed & 0xffffffff, + self.fudge, + len(self.mac))) + file.write(self.mac) + file.write(struct.pack('!HHH', self.original_id, self.error, + len(self.other))) + file.write(self.other) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + algorithm = parser.get_name() + time_signed = parser.get_uint48() + fudge = parser.get_uint16() + mac = parser.get_counted_bytes(2) + (original_id, error) = parser.get_struct('!HH') + other = parser.get_counted_bytes(2) + return cls(rdclass, rdtype, algorithm, time_signed, fudge, mac, + original_id, error, other) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TXT.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TXT.py new file mode 100644 index 0000000..cc4b661 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/TXT.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
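TSIG rdata like the class above is normally produced by the signing machinery rather than written by hand; the usual entry point is a keyring attached to a message. A sketch (the key name and base64 secret are made up; dnspython 2.x assumed):

import dns.message
import dns.tsigkeyring

keyring = dns.tsigkeyring.from_text({'demo-key.': 'c2VjcmV0c2VjcmV0c2VjcmV0'})
q = dns.message.make_query('example.com.', 'A')
q.use_tsig(keyring, keyname='demo-key.')
wire = q.to_wire()  # the TSIG record is computed and appended at render time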
+ +import dns.rdtypes.txtbase +import dns.immutable + + +@dns.immutable.immutable +class TXT(dns.rdtypes.txtbase.TXTBase): + + """TXT record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/URI.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/URI.py new file mode 100644 index 0000000..524fa1b --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/URI.py @@ -0,0 +1,80 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# Copyright (C) 2015 Red Hat, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdtypes.util +import dns.name + + +@dns.immutable.immutable +class URI(dns.rdata.Rdata): + + """URI record""" + + # see RFC 7553 + + __slots__ = ['priority', 'weight', 'target'] + + def __init__(self, rdclass, rdtype, priority, weight, target): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.weight = self._as_uint16(weight) + self.target = self._as_bytes(target, True) + if len(self.target) == 0: + raise dns.exception.SyntaxError("URI target cannot be empty") + + def to_text(self, origin=None, relativize=True, **kw): + return '%d %d "%s"' % (self.priority, self.weight, + self.target.decode()) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + priority = tok.get_uint16() + weight = tok.get_uint16() + target = tok.get().unescape() + if not (target.is_quoted_string() or target.is_identifier()): + raise dns.exception.SyntaxError("URI target must be a string") + return cls(rdclass, rdtype, priority, weight, target.value) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + two_ints = struct.pack("!HH", self.priority, self.weight) + file.write(two_ints) + file.write(self.target) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (priority, weight) = parser.get_struct('!HH') + target = parser.get_remaining() + if len(target) == 0: + raise dns.exception.FormError('URI target may not be empty') + return cls(rdclass, rdtype, priority, weight, target) + + def _processing_priority(self): + return self.priority + + def _processing_weight(self): + return self.weight + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.weighted_processing_order(iterable) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/X25.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/X25.py new file mode 100644 index 0000000..4f7230c --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/X25.py @@ -0,0 +1,57 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 
2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class X25(dns.rdata.Rdata): + + """X25 record""" + + # see RFC 1183 + + __slots__ = ['address'] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + return '"%s"' % dns.rdata._escapify(self.address) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + address = tok.get_string() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.address) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.address) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_counted_bytes() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/ZONEMD.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/ZONEMD.py new file mode 100644 index 0000000..035f7b3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/ZONEMD.py @@ -0,0 +1,65 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct +import binascii + +import dns.immutable +import dns.rdata +import dns.rdatatype +import dns.zone + + +@dns.immutable.immutable +class ZONEMD(dns.rdata.Rdata): + + """ZONEMD record""" + + # See RFC 8976 + + __slots__ = ['serial', 'scheme', 'hash_algorithm', 'digest'] + + def __init__(self, rdclass, rdtype, serial, scheme, hash_algorithm, digest): + super().__init__(rdclass, rdtype) + self.serial = self._as_uint32(serial) + self.scheme = dns.zone.DigestScheme.make(scheme) + self.hash_algorithm = dns.zone.DigestHashAlgorithm.make(hash_algorithm) + self.digest = self._as_bytes(digest) + + if self.scheme == 0: # reserved, RFC 8976 Sec. 5.2 + raise ValueError('scheme 0 is reserved') + if self.hash_algorithm == 0: # reserved, RFC 8976 Sec. 
5.3 + raise ValueError('hash_algorithm 0 is reserved') + + hasher = dns.zone._digest_hashers.get(self.hash_algorithm) + if hasher and hasher().digest_size != len(self.digest): + raise ValueError('digest length inconsistent with hash algorithm') + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop('chunksize', 128) + return '%d %d %d %s' % (self.serial, self.scheme, self.hash_algorithm, + dns.rdata._hexify(self.digest, + chunksize=chunksize, + **kw)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + serial = tok.get_uint32() + scheme = tok.get_uint8() + hash_algorithm = tok.get_uint8() + digest = tok.concatenate_remaining_identifiers().encode() + digest = binascii.unhexlify(digest) + return cls(rdclass, rdtype, serial, scheme, hash_algorithm, digest) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!IBB", self.serial, self.scheme, + self.hash_algorithm) + file.write(header) + file.write(self.digest) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!IBB") + digest = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__init__.py b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__init__.py new file mode 100644 index 0000000..2cadcde --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__init__.py @@ -0,0 +1,68 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
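The ZONEMD constructor above validates the digest length against the hash algorithm (48 bytes for SHA-384). A sketch using values adapted from the simple-zone example in RFC 8976 (a dnspython release recent enough to include ZONEMD support is assumed):

import dns.rdata

z = dns.rdata.from_text(
    'IN', 'ZONEMD',
    '2018031900 1 1 '
    'c68090d90a7aed716bc459f9340e3d7c1370d4d24b7e2fc3'
    'a1ddc0b9a87153b9a9713b3c9ae5cc27777f98b8e730044c')
assert z.scheme == 1        # SIMPLE
assert len(z.digest) == 48  # SHA-384 digest size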
+ +"""Class ANY (generic) rdata type classes.""" + +__all__ = [ + 'AFSDB', + 'AMTRELAY', + 'AVC', + 'CAA', + 'CDNSKEY', + 'CDS', + 'CERT', + 'CNAME', + 'CSYNC', + 'DLV', + 'DNAME', + 'DNSKEY', + 'DS', + 'EUI48', + 'EUI64', + 'GPOS', + 'HINFO', + 'HIP', + 'ISDN', + 'L32', + 'L64', + 'LOC', + 'LP', + 'MX', + 'NID', + 'NINFO', + 'NS', + 'NSEC', + 'NSEC3', + 'NSEC3PARAM', + 'OPENPGPKEY', + 'OPT', + 'PTR', + 'RP', + 'RRSIG', + 'RT', + 'SMIMEA', + 'SOA', + 'SPF', + 'SSHFP', + 'TKEY', + 'TLSA', + 'TSIG', + 'TXT', + 'URI', + 'X25', + 'ZONEMD', +] diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-310.pyc new file mode 100644 index 0000000..ca773fa Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-310.pyc new file mode 100644 index 0000000..d05ecc5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-310.pyc new file mode 100644 index 0000000..29c9c91 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-310.pyc new file mode 100644 index 0000000..e93b941 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-310.pyc new file mode 100644 index 0000000..39724c1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-310.pyc new file mode 100644 index 0000000..63d84c8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-310.pyc new file mode 100644 index 0000000..a4abcb5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-310.pyc new file mode 100644 index 0000000..2cfca0b Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-310.pyc new file mode 100644 index 0000000..4264fdb Binary files 
/dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DLV.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DLV.cpython-310.pyc new file mode 100644 index 0000000..eddcf00 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DLV.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DNAME.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DNAME.cpython-310.pyc new file mode 100644 index 0000000..d44ce0b Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DNAME.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-310.pyc new file mode 100644 index 0000000..0dd4de0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DS.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DS.cpython-310.pyc new file mode 100644 index 0000000..8be773d Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/DS.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/EUI48.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/EUI48.cpython-310.pyc new file mode 100644 index 0000000..de3857a Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/EUI48.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/EUI64.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/EUI64.cpython-310.pyc new file mode 100644 index 0000000..fb825a1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/EUI64.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/GPOS.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/GPOS.cpython-310.pyc new file mode 100644 index 0000000..0eb6d4d Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/GPOS.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/HINFO.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/HINFO.cpython-310.pyc new file mode 100644 index 0000000..3d5bd2a Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/HINFO.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/HIP.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/HIP.cpython-310.pyc new file mode 100644 index 0000000..9c058f3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/HIP.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/ISDN.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/ISDN.cpython-310.pyc new file mode 100644 index 0000000..3504522 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/ISDN.cpython-310.pyc differ 
diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/L32.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/L32.cpython-310.pyc new file mode 100644 index 0000000..5aa78d1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/L32.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/L64.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/L64.cpython-310.pyc new file mode 100644 index 0000000..c42439e Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/L64.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/LOC.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/LOC.cpython-310.pyc new file mode 100644 index 0000000..eb6fe0c Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/LOC.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/LP.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/LP.cpython-310.pyc new file mode 100644 index 0000000..a088c1b Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/LP.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/MX.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/MX.cpython-310.pyc new file mode 100644 index 0000000..f746cad Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/MX.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NID.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NID.cpython-310.pyc new file mode 100644 index 0000000..9a18715 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NID.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NINFO.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NINFO.cpython-310.pyc new file mode 100644 index 0000000..a445274 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NINFO.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NS.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NS.cpython-310.pyc new file mode 100644 index 0000000..af2b0de Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NS.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NSEC.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NSEC.cpython-310.pyc new file mode 100644 index 0000000..375c041 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NSEC.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3.cpython-310.pyc new file mode 100644 index 0000000..9bf577d Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-310.pyc new file mode 100644 index 0000000..9df1ff8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-310.pyc new file mode 100644 index 0000000..f1259c0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/OPT.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/OPT.cpython-310.pyc new file mode 100644 index 0000000..a61ca49 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/OPT.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/PTR.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/PTR.cpython-310.pyc new file mode 100644 index 0000000..fdc7e8f Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/PTR.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/RP.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/RP.cpython-310.pyc new file mode 100644 index 0000000..4abbf10 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/RP.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/RRSIG.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/RRSIG.cpython-310.pyc new file mode 100644 index 0000000..920ae32 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/RRSIG.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/RT.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/RT.cpython-310.pyc new file mode 100644 index 0000000..ac94eb5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/RT.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-310.pyc new file mode 100644 index 0000000..835d48d Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SOA.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SOA.cpython-310.pyc new file mode 100644 index 0000000..05eba5b Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SOA.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SPF.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SPF.cpython-310.pyc new file mode 100644 index 0000000..b711c5d Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SPF.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SSHFP.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SSHFP.cpython-310.pyc new file 
mode 100644 index 0000000..32c4db9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/SSHFP.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TKEY.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TKEY.cpython-310.pyc new file mode 100644 index 0000000..cff122a Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TKEY.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TLSA.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TLSA.cpython-310.pyc new file mode 100644 index 0000000..389b67c Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TLSA.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TSIG.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TSIG.cpython-310.pyc new file mode 100644 index 0000000..7d7aa7a Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TSIG.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TXT.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TXT.cpython-310.pyc new file mode 100644 index 0000000..d814a83 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/TXT.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/URI.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/URI.cpython-310.pyc new file mode 100644 index 0000000..e209aa7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/URI.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/X25.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/X25.cpython-310.pyc new file mode 100644 index 0000000..6cca579 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/X25.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-310.pyc new file mode 100644 index 0000000..b71ae28 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..5d9f697 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/ANY/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/CH/A.py b/venv/lib/python3.10/site-packages/dns/rdtypes/CH/A.py new file mode 100644 index 0000000..828701b --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/CH/A.py @@ -0,0 +1,58 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.rdtypes.mxbase +import dns.immutable + +@dns.immutable.immutable +class A(dns.rdata.Rdata): + + """A record for Chaosnet""" + + # domain: the domain of the address + # address: the 16-bit address + + __slots__ = ['domain', 'address'] + + def __init__(self, rdclass, rdtype, domain, address): + super().__init__(rdclass, rdtype) + self.domain = self._as_name(domain) + self.address = self._as_uint16(address) + + def to_text(self, origin=None, relativize=True, **kw): + domain = self.domain.choose_relativity(origin, relativize) + return '%s %o' % (domain, self.address) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + domain = tok.get_name(origin, relativize, relativize_to) + address = tok.get_uint16(base=8) + return cls(rdclass, rdtype, domain, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.domain.to_wire(file, compress, origin, canonicalize) + pref = struct.pack("!H", self.address) + file.write(pref) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + domain = parser.get_name(origin) + address = parser.get_uint16() + return cls(rdclass, rdtype, domain, address) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/CH/__init__.py b/venv/lib/python3.10/site-packages/dns/rdtypes/CH/__init__.py new file mode 100644 index 0000000..7184a73 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/CH/__init__.py @@ -0,0 +1,22 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
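The Chaosnet A class above stores a 16-bit address that to_text renders in octal. Note it reaches dns.rdata.Rdata without importing dns.rdata directly; importing dns.rdtypes.mxbase binds dns.rdata onto the parent package as a side effect. A minimal sketch of exercising the class through dnspython's public API (assuming the dnspython 2.x release vendored in this commit; net.example. is a placeholder name):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

# Text form is '<domain> <address>', address parsed base-8
# (tok.get_uint16(base=8)) and printed back via '%o'.
rd = dns.rdata.from_text(dns.rdataclass.CH, dns.rdatatype.A, 'net.example. 140')
print(rd.domain, rd.address)   # net.example. 96  (octal 140)
print(rd.to_text())            # 'net.example. 140' again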
+ +"""Class CH rdata type classes.""" + +__all__ = [ + 'A', +] diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/CH/__pycache__/A.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/CH/__pycache__/A.cpython-310.pyc new file mode 100644 index 0000000..c0578b9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/CH/__pycache__/A.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/CH/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/CH/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..bd5e4c4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/CH/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/A.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/A.py new file mode 100644 index 0000000..74b591e --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/A.py @@ -0,0 +1,51 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class A(dns.rdata.Rdata): + + """A record.""" + + __slots__ = ['address'] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv4_address(address) + + def to_text(self, origin=None, relativize=True, **kw): + return self.address + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + address = tok.get_identifier() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv4.inet_aton(self.address)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/AAAA.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/AAAA.py new file mode 100644 index 0000000..2d3ec90 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/AAAA.py @@ -0,0 +1,51 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.ipv6 +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class AAAA(dns.rdata.Rdata): + + """AAAA record.""" + + __slots__ = ['address'] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv6_address(address) + + def to_text(self, origin=None, relativize=True, **kw): + return self.address + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + address = tok.get_identifier() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv6.inet_aton(self.address)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/APL.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/APL.py new file mode 100644 index 0000000..ae94fb2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/APL.py @@ -0,0 +1,151 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
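Both address classes above validate input through _as_ipv4_address/_as_ipv6_address and emit the packed form, so the text, object, and wire representations stay interchangeable. A short round-trip sketch (dnspython 2.x public API; the addresses are documentation values):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

a = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, '192.0.2.1')
wire = a.to_wire()                           # 4 bytes from dns.ipv4.inet_aton
assert wire == bytes([192, 0, 2, 1])

aaaa = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.AAAA, '2001:db8::1')
assert len(aaaa.to_wire()) == 16             # fixed-size IPv6 payload

# from_wire_parser consumes the remaining bytes, mirroring _to_wire:
back = dns.rdata.from_wire(dns.rdataclass.IN, dns.rdatatype.A, wire, 0, len(wire))
assert back == a and back.to_text() == '192.0.2.1'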
+ +import binascii +import codecs +import struct + +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.rdata +import dns.tokenizer + +@dns.immutable.immutable +class APLItem: + + """An APL list item.""" + + __slots__ = ['family', 'negation', 'address', 'prefix'] + + def __init__(self, family, negation, address, prefix): + self.family = dns.rdata.Rdata._as_uint16(family) + self.negation = dns.rdata.Rdata._as_bool(negation) + if self.family == 1: + self.address = dns.rdata.Rdata._as_ipv4_address(address) + self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 32) + elif self.family == 2: + self.address = dns.rdata.Rdata._as_ipv6_address(address) + self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 128) + else: + self.address = dns.rdata.Rdata._as_bytes(address, max_length=127) + self.prefix = dns.rdata.Rdata._as_uint8(prefix) + + def __str__(self): + if self.negation: + return "!%d:%s/%s" % (self.family, self.address, self.prefix) + else: + return "%d:%s/%s" % (self.family, self.address, self.prefix) + + def to_wire(self, file): + if self.family == 1: + address = dns.ipv4.inet_aton(self.address) + elif self.family == 2: + address = dns.ipv6.inet_aton(self.address) + else: + address = binascii.unhexlify(self.address) + # + # Truncate least significant zero bytes. + # + last = 0 + for i in range(len(address) - 1, -1, -1): + if address[i] != 0: + last = i + 1 + break + address = address[0: last] + l = len(address) + assert l < 128 + if self.negation: + l |= 0x80 + header = struct.pack('!HBB', self.family, self.prefix, l) + file.write(header) + file.write(address) + + +@dns.immutable.immutable +class APL(dns.rdata.Rdata): + + """APL record.""" + + # see: RFC 3123 + + __slots__ = ['items'] + + def __init__(self, rdclass, rdtype, items): + super().__init__(rdclass, rdtype) + for item in items: + if not isinstance(item, APLItem): + raise ValueError('item not an APLItem') + self.items = tuple(items) + + def to_text(self, origin=None, relativize=True, **kw): + return ' '.join(map(str, self.items)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + items = [] + for token in tok.get_remaining(): + item = token.unescape().value + if item[0] == '!': + negation = True + item = item[1:] + else: + negation = False + (family, rest) = item.split(':', 1) + family = int(family) + (address, prefix) = rest.split('/', 1) + prefix = int(prefix) + item = APLItem(family, negation, address, prefix) + items.append(item) + + return cls(rdclass, rdtype, items) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for item in self.items: + item.to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + + items = [] + while parser.remaining() > 0: + header = parser.get_struct('!HBB') + afdlen = header[2] + if afdlen > 127: + negation = True + afdlen -= 128 + else: + negation = False + address = parser.get_bytes(afdlen) + l = len(address) + if header[0] == 1: + if l < 4: + address += b'\x00' * (4 - l) + elif header[0] == 2: + if l < 16: + address += b'\x00' * (16 - l) + else: + # + # This isn't really right according to the RFC, but it + # seems better than throwing an exception + # + address = codecs.encode(address, 'hex_codec') + item = APLItem(header[0], negation, address, header[1]) + items.append(item) + return cls(rdclass, rdtype, items) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/DHCID.py 
b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/DHCID.py new file mode 100644 index 0000000..a918598 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/DHCID.py @@ -0,0 +1,53 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 + +import dns.exception +import dns.immutable + + +@dns.immutable.immutable +class DHCID(dns.rdata.Rdata): + + """DHCID record""" + + # see: RFC 4701 + + __slots__ = ['data'] + + def __init__(self, rdclass, rdtype, data): + super().__init__(rdclass, rdtype) + self.data = self._as_bytes(data) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._base64ify(self.data, **kw) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + b64 = tok.concatenate_remaining_identifiers().encode() + data = base64.b64decode(b64) + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.data) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + data = parser.get_remaining() + return cls(rdclass, rdtype, data) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/HTTPS.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/HTTPS.py new file mode 100644 index 0000000..6a67e8e --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/HTTPS.py @@ -0,0 +1,8 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.rdtypes.svcbbase +import dns.immutable + +@dns.immutable.immutable +class HTTPS(dns.rdtypes.svcbbase.SVCBBase): + """HTTPS record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/IPSECKEY.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/IPSECKEY.py new file mode 100644 index 0000000..d1d3943 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/IPSECKEY.py @@ -0,0 +1,83 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
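The APL implementation one file back packs each item as a !HBB header plus an address stripped of trailing zero octets, with bit 0x80 of the length byte carrying negation. A sketch of the text interface (dnspython 2.x; the prefixes are placeholder ranges):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

# '!' negates an item; family 1 is IPv4, family 2 is IPv6 (RFC 3123).
apl = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.APL,
                          '1:10.0.0.0/8 !1:192.0.2.0/28 2:2001:db8::/32')
for item in apl.items:
    print(item.family, item.negation, item.address, item.prefix)

# On the wire, 1:10.0.0.0/8 becomes the 4-byte item header plus a single
# 0x0a octet, because to_wire truncates the trailing zero bytes.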
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct +import base64 + +import dns.exception +import dns.immutable +import dns.rdtypes.util + + +class Gateway(dns.rdtypes.util.Gateway): + name = 'IPSECKEY gateway' + +@dns.immutable.immutable +class IPSECKEY(dns.rdata.Rdata): + + """IPSECKEY record""" + + # see: RFC 4025 + + __slots__ = ['precedence', 'gateway_type', 'algorithm', 'gateway', 'key'] + + def __init__(self, rdclass, rdtype, precedence, gateway_type, algorithm, + gateway, key): + super().__init__(rdclass, rdtype) + gateway = Gateway(gateway_type, gateway) + self.precedence = self._as_uint8(precedence) + self.gateway_type = gateway.type + self.algorithm = self._as_uint8(algorithm) + self.gateway = gateway.gateway + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + gateway = Gateway(self.gateway_type, self.gateway).to_text(origin, + relativize) + return '%d %d %d %s %s' % (self.precedence, self.gateway_type, + self.algorithm, gateway, + dns.rdata._base64ify(self.key, **kw)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + precedence = tok.get_uint8() + gateway_type = tok.get_uint8() + algorithm = tok.get_uint8() + gateway = Gateway.from_text(gateway_type, tok, origin, relativize, + relativize_to) + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls(rdclass, rdtype, precedence, gateway_type, algorithm, + gateway.gateway, key) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BBB", self.precedence, self.gateway_type, + self.algorithm) + file.write(header) + Gateway(self.gateway_type, self.gateway).to_wire(file, compress, + origin, canonicalize) + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct('!BBB') + gateway_type = header[1] + gateway = Gateway.from_wire_parser(gateway_type, parser, origin) + key = parser.get_remaining() + return cls(rdclass, rdtype, header[0], gateway_type, header[2], + gateway.gateway, key) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/KX.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/KX.py new file mode 100644 index 0000000..c27e921 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/KX.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
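IPSECKEY above delegates gateway handling to the Gateway helper, which validates the gateway field against the declared gateway type (0 none, 1 IPv4, 2 IPv6, 3 domain name, per RFC 4025). A parsing sketch modeled on the RFC 4025 sample record (dnspython 2.x; treat the key material as an illustrative value):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

# precedence, gateway-type, algorithm, gateway, base64 key
txt = '10 1 2 192.0.2.38 AQNRU3mG7TVTO2BkR47usntb102uFJtugbo6BSGvgqt4AQ=='
rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.IPSECKEY, txt)
print(rd.precedence, rd.gateway_type, rd.algorithm, rd.gateway)  # 10 1 2 192.0.2.38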
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.mxbase +import dns.immutable + + +@dns.immutable.immutable +class KX(dns.rdtypes.mxbase.UncompressedDowncasingMX): + + """KX record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/NAPTR.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/NAPTR.py new file mode 100644 index 0000000..b107974 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/NAPTR.py @@ -0,0 +1,99 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +def _write_string(file, s): + l = len(s) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(s) + + +@dns.immutable.immutable +class NAPTR(dns.rdata.Rdata): + + """NAPTR record""" + + # see: RFC 3403 + + __slots__ = ['order', 'preference', 'flags', 'service', 'regexp', + 'replacement'] + + def __init__(self, rdclass, rdtype, order, preference, flags, service, + regexp, replacement): + super().__init__(rdclass, rdtype) + self.flags = self._as_bytes(flags, True, 255) + self.service = self._as_bytes(service, True, 255) + self.regexp = self._as_bytes(regexp, True, 255) + self.order = self._as_uint16(order) + self.preference = self._as_uint16(preference) + self.replacement = self._as_name(replacement) + + def to_text(self, origin=None, relativize=True, **kw): + replacement = self.replacement.choose_relativity(origin, relativize) + return '%d %d "%s" "%s" "%s" %s' % \ + (self.order, self.preference, + dns.rdata._escapify(self.flags), + dns.rdata._escapify(self.service), + dns.rdata._escapify(self.regexp), + replacement) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + order = tok.get_uint16() + preference = tok.get_uint16() + flags = tok.get_string() + service = tok.get_string() + regexp = tok.get_string() + replacement = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, order, preference, flags, service, + regexp, replacement) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + two_ints = struct.pack("!HH", self.order, self.preference) + file.write(two_ints) + _write_string(file, self.flags) + _write_string(file, self.service) + _write_string(file, self.regexp) + self.replacement.to_wire(file, compress, origin, canonicalize) + + 
@classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (order, preference) = parser.get_struct('!HH') + strings = [] + for _ in range(3): + s = parser.get_counted_bytes() + strings.append(s) + replacement = parser.get_name(origin) + return cls(rdclass, rdtype, order, preference, strings[0], strings[1], + strings[2], replacement) + + def _processing_priority(self): + return (self.order, self.preference) + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/NSAP.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/NSAP.py new file mode 100644 index 0000000..23ae9b1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/NSAP.py @@ -0,0 +1,60 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class NSAP(dns.rdata.Rdata): + + """NSAP record.""" + + # see: RFC 1706 + + __slots__ = ['address'] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address) + + def to_text(self, origin=None, relativize=True, **kw): + return "0x%s" % binascii.hexlify(self.address).decode() + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + address = tok.get_string() + if address[0:2] != '0x': + raise dns.exception.SyntaxError('string does not start with 0x') + address = address[2:].replace('.', '') + if len(address) % 2 != 0: + raise dns.exception.SyntaxError('hexstring has odd length') + address = binascii.unhexlify(address.encode()) + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.address) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/NSAP_PTR.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/NSAP_PTR.py new file mode 100644 index 0000000..57dadd4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/NSAP_PTR.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
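NAPTR above sorts by (order, preference) via _processing_priority, and its three strings are length-prefixed on the wire by _write_string (hence the assert l < 256). A parsing sketch (dnspython 2.x; the SIP service values follow common RFC 3403-style usage):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

naptr = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.NAPTR,
    '100 10 "s" "SIP+D2U" "" _sip._udp.example.com.')
print(naptr.order, naptr.preference, naptr.flags)  # 100 10 b's'
print(naptr.replacement)                           # _sip._udp.example.com.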
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.nsbase +import dns.immutable + + +@dns.immutable.immutable +class NSAP_PTR(dns.rdtypes.nsbase.UncompressedNS): + + """NSAP-PTR record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/PX.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/PX.py new file mode 100644 index 0000000..113d409 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/PX.py @@ -0,0 +1,73 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
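The NSAP class one file back accepts the conventional dotted 0x hex notation and stores raw bytes; the dots are cosmetic and are stripped before unhexlify, which is why only the total hex digit count must be even. A sketch (dnspython 2.x; the address is modeled on the RFC 1706 example):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

nsap = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.NSAP,
    '0x47.0005.80.005a00.0000.0001.e133.ffffff000162.00')
print(nsap.to_text())   # re-emitted as one undotted lowercase 0x... hex string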
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdtypes.util +import dns.name + + +@dns.immutable.immutable +class PX(dns.rdata.Rdata): + + """PX record.""" + + # see: RFC 2163 + + __slots__ = ['preference', 'map822', 'mapx400'] + + def __init__(self, rdclass, rdtype, preference, map822, mapx400): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.map822 = self._as_name(map822) + self.mapx400 = self._as_name(mapx400) + + def to_text(self, origin=None, relativize=True, **kw): + map822 = self.map822.choose_relativity(origin, relativize) + mapx400 = self.mapx400.choose_relativity(origin, relativize) + return '%d %s %s' % (self.preference, map822, mapx400) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + preference = tok.get_uint16() + map822 = tok.get_name(origin, relativize, relativize_to) + mapx400 = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, map822, mapx400) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + pref = struct.pack("!H", self.preference) + file.write(pref) + self.map822.to_wire(file, None, origin, canonicalize) + self.mapx400.to_wire(file, None, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + map822 = parser.get_name(origin) + mapx400 = parser.get_name(origin) + return cls(rdclass, rdtype, preference, map822, mapx400) + + def _processing_priority(self): + return self.preference + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/SRV.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/SRV.py new file mode 100644 index 0000000..5b5ff42 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/SRV.py @@ -0,0 +1,76 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
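PX above writes both of its names uncompressed (_to_wire passes compress=None), matching the RFC 2163 MIXER mapping rules. Parsing sketch (dnspython 2.x; the domains follow the RFC's X.400 mapping examples):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

px = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.PX,
                         '10 net2.it. PRMD-net2.ADMD-p400.C-it.')
print(px.preference, px.map822, px.mapx400)  # 10 net2.it. PRMD-net2.ADMD-p400.C-it.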
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdtypes.util +import dns.name + + +@dns.immutable.immutable +class SRV(dns.rdata.Rdata): + + """SRV record""" + + # see: RFC 2782 + + __slots__ = ['priority', 'weight', 'port', 'target'] + + def __init__(self, rdclass, rdtype, priority, weight, port, target): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.weight = self._as_uint16(weight) + self.port = self._as_uint16(port) + self.target = self._as_name(target) + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + return '%d %d %d %s' % (self.priority, self.weight, self.port, + target) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + priority = tok.get_uint16() + weight = tok.get_uint16() + port = tok.get_uint16() + target = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, priority, weight, port, target) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + three_ints = struct.pack("!HHH", self.priority, self.weight, self.port) + file.write(three_ints) + self.target.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (priority, weight, port) = parser.get_struct('!HHH') + target = parser.get_name(origin) + return cls(rdclass, rdtype, priority, weight, port, target) + + def _processing_priority(self): + return self.priority + + def _processing_weight(self): + return self.weight + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.weighted_processing_order(iterable) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/SVCB.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/SVCB.py new file mode 100644 index 0000000..14838e1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/SVCB.py @@ -0,0 +1,8 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.rdtypes.svcbbase +import dns.immutable + +@dns.immutable.immutable +class SVCB(dns.rdtypes.svcbbase.SVCBBase): + """SVCB record""" diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/WKS.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/WKS.py new file mode 100644 index 0000000..264e45d --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/WKS.py @@ -0,0 +1,99 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
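SRV above exposes _processing_priority and _processing_weight so callers can apply the RFC 2782 selection rules via weighted_processing_order: lowest priority first, then a weight-proportional shuffle within each priority. Parsing sketch (dnspython 2.x; the host name is a placeholder):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

srv = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.SRV,
                          '10 60 5060 sip1.example.com.')
print(srv.priority, srv.weight, srv.port, srv.target)
# !HHH header: priority 10, weight 60, port 5060 == 0x13C4
assert srv.to_wire()[:6] == bytes([0, 10, 0, 60, 0x13, 0xC4])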
+ +import socket +import struct + +import dns.ipv4 +import dns.immutable +import dns.rdata + +try: + _proto_tcp = socket.getprotobyname('tcp') + _proto_udp = socket.getprotobyname('udp') +except OSError: + # Fall back to defaults in case /etc/protocols is unavailable. + _proto_tcp = 6 + _proto_udp = 17 + +@dns.immutable.immutable +class WKS(dns.rdata.Rdata): + + """WKS record""" + + # see: RFC 1035 + + __slots__ = ['address', 'protocol', 'bitmap'] + + def __init__(self, rdclass, rdtype, address, protocol, bitmap): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv4_address(address) + self.protocol = self._as_uint8(protocol) + self.bitmap = self._as_bytes(bitmap) + + def to_text(self, origin=None, relativize=True, **kw): + bits = [] + for i, byte in enumerate(self.bitmap): + for j in range(0, 8): + if byte & (0x80 >> j): + bits.append(str(i * 8 + j)) + text = ' '.join(bits) + return '%s %d %s' % (self.address, self.protocol, text) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + address = tok.get_string() + protocol = tok.get_string() + if protocol.isdigit(): + protocol = int(protocol) + else: + protocol = socket.getprotobyname(protocol) + bitmap = bytearray() + for token in tok.get_remaining(): + value = token.unescape().value + if value.isdigit(): + serv = int(value) + else: + if protocol != _proto_udp and protocol != _proto_tcp: + raise NotImplementedError("protocol must be TCP or UDP") + if protocol == _proto_udp: + protocol_text = "udp" + else: + protocol_text = "tcp" + serv = socket.getservbyname(value, protocol_text) + i = serv // 8 + l = len(bitmap) + if l < i + 1: + for _ in range(l, i + 1): + bitmap.append(0) + bitmap[i] = bitmap[i] | (0x80 >> (serv % 8)) + bitmap = dns.rdata._truncate_bitmap(bitmap) + return cls(rdclass, rdtype, address, protocol, bitmap) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv4.inet_aton(self.address)) + protocol = struct.pack('!B', self.protocol) + file.write(protocol) + file.write(self.bitmap) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_bytes(4) + protocol = parser.get_uint8() + bitmap = parser.get_remaining() + return cls(rdclass, rdtype, address, protocol, bitmap) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__init__.py b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__init__.py new file mode 100644 index 0000000..d51b99e --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__init__.py @@ -0,0 +1,35 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
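WKS above builds a port bitmap: bit (0x80 >> (port % 8)) of byte port // 8, with trailing zero bytes removed by dns.rdata._truncate_bitmap. A sketch using numeric protocol and ports, which avoids any dependence on /etc/services lookups (dnspython 2.x):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

# address, protocol (6 = TCP), then port numbers
wks = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.WKS,
                          '192.0.2.1 6 25 80')
print(wks.to_text())                              # 192.0.2.1 6 25 80
assert wks.bitmap[25 // 8] & (0x80 >> (25 % 8))   # port 25 -> byte 3, bit 1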
+ +"""Class IN rdata type classes.""" + +__all__ = [ + 'A', + 'AAAA', + 'APL', + 'DHCID', + 'HTTPS', + 'IPSECKEY', + 'KX', + 'NAPTR', + 'NSAP', + 'NSAP_PTR', + 'PX', + 'SRV', + 'SVCB', + 'WKS', +] diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-310.pyc new file mode 100644 index 0000000..f4dcf1b Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-310.pyc new file mode 100644 index 0000000..9fc362a Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-310.pyc new file mode 100644 index 0000000..b59fc3a Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-310.pyc new file mode 100644 index 0000000..a519f00 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-310.pyc new file mode 100644 index 0000000..74f3ad8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-310.pyc new file mode 100644 index 0000000..c73cc41 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-310.pyc new file mode 100644 index 0000000..04050a4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-310.pyc new file mode 100644 index 0000000..be84a00 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-310.pyc new file mode 100644 index 0000000..97378db Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-310.pyc new file mode 100644 index 0000000..8d5d5bb Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-310.pyc new file mode 100644 index 0000000..b9cc79e Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-310.pyc new file mode 100644 index 0000000..0870565 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-310.pyc new file mode 100644 index 0000000..dc207d5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-310.pyc new file mode 100644 index 0000000..a5fce35 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..256d24c Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/__init__.py b/venv/lib/python3.10/site-packages/dns/rdtypes/__init__.py new file mode 100644 index 0000000..c3af264 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/__init__.py @@ -0,0 +1,33 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
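The __all__ lists enumerate the per-type modules in each rdata package; dnspython resolves an (rdclass, rdtype) pair by importing dns.rdtypes.<class>.<type> dynamically, and any type without such a module falls back to the generic RFC 3597 \# encoding. A sketch of both paths (dnspython 2.x; 65280 is a private-use type code):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

# Unknown type: GenericRdata round-trips the '\# <length> <hex>' form.
generic = dns.rdata.from_text(dns.rdataclass.IN, 65280, r'\# 4 c0000201')
print(generic.to_text())      # \# 4 c0000201

# Known type: resolves to a class from the packages declared above.
a = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, '192.0.2.1')
print(type(a).__module__)     # dns.rdtypes.IN.A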
+ +"""DNS rdata type classes""" + +__all__ = [ + 'ANY', + 'IN', + 'CH', + 'dnskeybase', + 'dsbase', + 'euibase', + 'mxbase', + 'nsbase', + 'svcbbase', + 'tlsabase', + 'txtbase', + 'util' +] diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..d38b428 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-310.pyc new file mode 100644 index 0000000..e51d6f0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-310.pyc new file mode 100644 index 0000000..40f5015 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/euibase.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/euibase.cpython-310.pyc new file mode 100644 index 0000000..1e2b743 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/euibase.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-310.pyc new file mode 100644 index 0000000..b87c8e0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-310.pyc new file mode 100644 index 0000000..c68a533 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-310.pyc new file mode 100644 index 0000000..a0c2b53 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-310.pyc new file mode 100644 index 0000000..276b2fd Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-310.pyc new file mode 100644 index 0000000..7ff2f2c Binary files /dev/null and b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/util.cpython-310.pyc b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000..6e19eb2 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/dns/rdtypes/__pycache__/util.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/dnskeybase.py b/venv/lib/python3.10/site-packages/dns/rdtypes/dnskeybase.py new file mode 100644 index 0000000..788bb2b --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/dnskeybase.py @@ -0,0 +1,82 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import enum +import struct + +import dns.exception +import dns.immutable +import dns.dnssec +import dns.rdata + +# wildcard import +__all__ = ["SEP", "REVOKE", "ZONE"] # noqa: F822 + +class Flag(enum.IntFlag): + SEP = 0x0001 + REVOKE = 0x0080 + ZONE = 0x0100 + + +@dns.immutable.immutable +class DNSKEYBase(dns.rdata.Rdata): + + """Base class for rdata that is like a DNSKEY record""" + + __slots__ = ['flags', 'protocol', 'algorithm', 'key'] + + def __init__(self, rdclass, rdtype, flags, protocol, algorithm, key): + super().__init__(rdclass, rdtype) + self.flags = self._as_uint16(flags) + self.protocol = self._as_uint8(protocol) + self.algorithm = dns.dnssec.Algorithm.make(algorithm) + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + return '%d %d %d %s' % (self.flags, self.protocol, self.algorithm, + dns.rdata._base64ify(self.key, **kw)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + flags = tok.get_uint16() + protocol = tok.get_uint8() + algorithm = tok.get_string() + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls(rdclass, rdtype, flags, protocol, algorithm, key) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!HBB", self.flags, self.protocol, self.algorithm) + file.write(header) + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct('!HBB') + key = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], + key) + +### BEGIN generated Flag constants + +SEP = Flag.SEP +REVOKE = Flag.REVOKE +ZONE = Flag.ZONE + +### END generated Flag constants diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/dnskeybase.pyi b/venv/lib/python3.10/site-packages/dns/rdtypes/dnskeybase.pyi new file mode 100644 index 0000000..1b999cf --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/dnskeybase.pyi @@ -0,0 +1,38 @@ +from typing import Set, Any + +SEP : int +REVOKE : int +ZONE : int + +def flags_to_text_set(flags : int) -> Set[str]: + ... + +def flags_from_text_set(texts_set) -> int: + ... + +from .. 
import rdata + +class DNSKEYBase(rdata.Rdata): + def __init__(self, rdclass, rdtype, flags, protocol, algorithm, key): + self.flags : int + self.protocol : int + self.key : str + self.algorithm : int + + def to_text(self, origin : Any = None, relativize=True, **kw : Any): + ... + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + ... + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + ... + + @classmethod + def from_parser(cls, rdclass, rdtype, parser, origin=None): + ... + + def flags_to_text_set(self) -> Set[str]: + ... diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/dsbase.py b/venv/lib/python3.10/site-packages/dns/rdtypes/dsbase.py new file mode 100644 index 0000000..0c2e747 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/dsbase.py @@ -0,0 +1,86 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2010, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct +import binascii + +import dns.dnssec +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class DSBase(dns.rdata.Rdata): + + """Base class for rdata that is like a DS record""" + + __slots__ = ['key_tag', 'algorithm', 'digest_type', 'digest'] + + # Digest types registry: https://www.iana.org/assignments/ds-rr-types/ds-rr-types.xhtml + _digest_length_by_type = { + 1: 20, # SHA-1, RFC 3658 Sec. 2.4 + 2: 32, # SHA-256, RFC 4509 Sec. 2.2 + 3: 32, # GOST R 34.11-94, RFC 5933 Sec. 4 in conjunction with RFC 4490 Sec. 2.1 + 4: 48, # SHA-384, RFC 6605 Sec. 2 + } + + def __init__(self, rdclass, rdtype, key_tag, algorithm, digest_type, + digest): + super().__init__(rdclass, rdtype) + self.key_tag = self._as_uint16(key_tag) + self.algorithm = dns.dnssec.Algorithm.make(algorithm) + self.digest_type = self._as_uint8(digest_type) + self.digest = self._as_bytes(digest) + try: + if len(self.digest) != self._digest_length_by_type[self.digest_type]: + raise ValueError('digest length inconsistent with digest type') + except KeyError: + if self.digest_type == 0: # reserved, RFC 3658 Sec. 
2.4 + raise ValueError('digest type 0 is reserved') + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop('chunksize', 128) + return '%d %d %d %s' % (self.key_tag, self.algorithm, + self.digest_type, + dns.rdata._hexify(self.digest, + chunksize=chunksize, + **kw)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + key_tag = tok.get_uint16() + algorithm = tok.get_string() + digest_type = tok.get_uint8() + digest = tok.concatenate_remaining_identifiers().encode() + digest = binascii.unhexlify(digest) + return cls(rdclass, rdtype, key_tag, algorithm, digest_type, + digest) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!HBB", self.key_tag, self.algorithm, + self.digest_type) + file.write(header) + file.write(self.digest) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!HBB") + digest = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/euibase.py b/venv/lib/python3.10/site-packages/dns/rdtypes/euibase.py new file mode 100644 index 0000000..48b69bd --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/euibase.py @@ -0,0 +1,69 @@ +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
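The _digest_length_by_type table above is what lets DSBase reject a digest whose length disagrees with its type. A sketch using the DS record from RFC 4034 section 5.4 (40 hex characters, i.e. the 20 bytes required for digest type 1, SHA-1):

    import dns.rdata
    import dns.rdataclass
    import dns.rdatatype

    # key_tag 60485, algorithm 5 (RSASHA1), digest type 1 (SHA-1)
    ds = dns.rdata.from_text(
        dns.rdataclass.IN, dns.rdatatype.DS,
        '60485 5 1 2BB183AF5F22588179A53B0A98631FAD1A292118')
    print(ds.key_tag, int(ds.algorithm), len(ds.digest))   # 60485 5 20

Dropping or adding hex characters makes __init__ raise ValueError('digest length inconsistent with digest type').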
+ +import binascii + +import dns.rdata +import dns.immutable + + +@dns.immutable.immutable +class EUIBase(dns.rdata.Rdata): + + """EUIxx record""" + + # see: rfc7043.txt + + __slots__ = ['eui'] + # define these in subclasses + # byte_len = 6 # 0123456789ab (in hex) + # text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab + + def __init__(self, rdclass, rdtype, eui): + super().__init__(rdclass, rdtype) + self.eui = self._as_bytes(eui) + if len(self.eui) != self.byte_len: + raise dns.exception.FormError('EUI%s rdata has to have %s bytes' + % (self.byte_len * 8, self.byte_len)) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._hexify(self.eui, chunksize=2, separator=b'-', **kw) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + text = tok.get_string() + if len(text) != cls.text_len: + raise dns.exception.SyntaxError( + 'Input text must have %s characters' % cls.text_len) + for i in range(2, cls.byte_len * 3 - 1, 3): + if text[i] != '-': + raise dns.exception.SyntaxError('Dash expected at position %s' + % i) + text = text.replace('-', '') + try: + data = binascii.unhexlify(text.encode()) + except (ValueError, TypeError) as ex: + raise dns.exception.SyntaxError('Hex decoding error: %s' % str(ex)) + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.eui) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + eui = parser.get_bytes(cls.byte_len) + return cls(rdclass, rdtype, eui) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/mxbase.py b/venv/lib/python3.10/site-packages/dns/rdtypes/mxbase.py new file mode 100644 index 0000000..5641823 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/mxbase.py @@ -0,0 +1,89 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
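EUIBase above is strict about its presentation format: exactly byte_len hex pairs joined by dashes, as RFC 7043 specifies. A sketch against the EUI48 subclass:

    import dns.rdata
    import dns.rdataclass
    import dns.rdatatype

    eui = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.EUI48,
                              '01-23-45-67-89-ab')
    print(eui.to_text())   # 01-23-45-67-89-ab
    print(len(eui.eui))    # 6, the byte_len defined by EUI48

A missing dash or a wrong overall length raises dns.exception.SyntaxError from from_text.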
+ +"""MX-like base classes.""" + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.name +import dns.rdtypes.util + + +@dns.immutable.immutable +class MXBase(dns.rdata.Rdata): + + """Base class for rdata that is like an MX record.""" + + __slots__ = ['preference', 'exchange'] + + def __init__(self, rdclass, rdtype, preference, exchange): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.exchange = self._as_name(exchange) + + def to_text(self, origin=None, relativize=True, **kw): + exchange = self.exchange.choose_relativity(origin, relativize) + return '%d %s' % (self.preference, exchange) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + preference = tok.get_uint16() + exchange = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, exchange) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + pref = struct.pack("!H", self.preference) + file.write(pref) + self.exchange.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + exchange = parser.get_name(origin) + return cls(rdclass, rdtype, preference, exchange) + + def _processing_priority(self): + return self.preference + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) + + +@dns.immutable.immutable +class UncompressedMX(MXBase): + + """Base class for rdata that is like an MX record, but whose name + is not compressed when converted to DNS wire format, and whose + digestable form is not downcased.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + super()._to_wire(file, None, origin, False) + + +@dns.immutable.immutable +class UncompressedDowncasingMX(MXBase): + + """Base class for rdata that is like an MX record, but whose name + is not compressed when convert to DNS wire format.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + super()._to_wire(file, None, origin, canonicalize) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/nsbase.py b/venv/lib/python3.10/site-packages/dns/rdtypes/nsbase.py new file mode 100644 index 0000000..b3e2550 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/nsbase.py @@ -0,0 +1,64 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
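MXBase._processing_priority above returns the preference so that dns.rdtypes.util.priority_processing_order (added later in this commit) can order rdatas lowest-preference first, shuffling only within a tie. A sketch:

    import dns.rdata
    import dns.rdataclass
    import dns.rdatatype
    import dns.rdtypes.util

    mxs = [dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.MX, text)
           for text in ('20 backup.example.', '10 mail.example.')]
    ordered = dns.rdtypes.util.priority_processing_order(mxs)
    print([str(mx.exchange) for mx in ordered])
    # ['mail.example.', 'backup.example.']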
+ +"""NS-like base classes.""" + +import dns.exception +import dns.immutable +import dns.rdata +import dns.name + + +@dns.immutable.immutable +class NSBase(dns.rdata.Rdata): + + """Base class for rdata that is like an NS record.""" + + __slots__ = ['target'] + + def __init__(self, rdclass, rdtype, target): + super().__init__(rdclass, rdtype) + self.target = self._as_name(target) + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + return str(target) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + target = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, target) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + target = parser.get_name(origin) + return cls(rdclass, rdtype, target) + + +@dns.immutable.immutable +class UncompressedNS(NSBase): + + """Base class for rdata that is like an NS record, but whose name + is not compressed when convert to DNS wire format, and whose + digestable form is not downcased.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, None, origin, False) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/svcbbase.py b/venv/lib/python3.10/site-packages/dns/rdtypes/svcbbase.py new file mode 100644 index 0000000..3362571 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/svcbbase.py @@ -0,0 +1,555 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import base64 +import enum +import io +import struct + +import dns.enum +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdata +import dns.rdtypes.util +import dns.tokenizer +import dns.wire + +# Until there is an RFC, this module is experimental and may be changed in +# incompatible ways. + + +class UnknownParamKey(dns.exception.DNSException): + """Unknown SVCB ParamKey""" + + +class ParamKey(dns.enum.IntEnum): + """SVCB ParamKey""" + + MANDATORY = 0 + ALPN = 1 + NO_DEFAULT_ALPN = 2 + PORT = 3 + IPV4HINT = 4 + ECH = 5 + IPV6HINT = 6 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "SVCBParamKey" + + @classmethod + def _prefix(cls): + return "KEY" + + @classmethod + def _unknown_exception_class(cls): + return UnknownParamKey + + +class Emptiness(enum.IntEnum): + NEVER = 0 + ALWAYS = 1 + ALLOWED = 2 + + +def _validate_key(key): + force_generic = False + if isinstance(key, bytes): + # We decode to latin-1 so we get 0-255 as valid and do NOT interpret + # UTF-8 sequences + key = key.decode('latin-1') + if isinstance(key, str): + if key.lower().startswith('key'): + force_generic = True + if key[3:].startswith('0') and len(key) != 4: + # key has leading zeros + raise ValueError('leading zeros in key') + key = key.replace('-', '_') + return (ParamKey.make(key), force_generic) + +def key_to_text(key): + return ParamKey.to_text(key).replace('_', '-').lower() + +# Like rdata escapify, but escapes ',' too. 
+ +_escaped = b'",\\' + +def _escapify(qstring): + text = '' + for c in qstring: + if c in _escaped: + text += '\\' + chr(c) + elif c >= 0x20 and c < 0x7F: + text += chr(c) + else: + text += '\\%03d' % c + return text + +def _unescape(value): + if value == '': + return value + unescaped = b'' + l = len(value) + i = 0 + while i < l: + c = value[i] + i += 1 + if c == '\\': + if i >= l: # pragma: no cover (can't happen via tokenizer get()) + raise dns.exception.UnexpectedEnd + c = value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + codepoint = int(c) * 100 + int(c2) * 10 + int(c3) + if codepoint > 255: + raise dns.exception.SyntaxError + unescaped += b'%c' % (codepoint) + continue + unescaped += c.encode() + return unescaped + + +def _split(value): + l = len(value) + i = 0 + items = [] + unescaped = b'' + while i < l: + c = value[i] + i += 1 + if c == ord('\\'): + if i >= l: # pragma: no cover (can't happen via tokenizer get()) + raise dns.exception.UnexpectedEnd + c = value[i] + i += 1 + unescaped += b'%c' % (c) + elif c == ord(','): + items.append(unescaped) + unescaped = b'' + else: + unescaped += b'%c' % (c) + items.append(unescaped) + return items + + +@dns.immutable.immutable +class Param: + """Abstract base class for SVCB parameters""" + + @classmethod + def emptiness(cls): + return Emptiness.NEVER + + +@dns.immutable.immutable +class GenericParam(Param): + """Generic SVCB parameter + """ + def __init__(self, value): + self.value = dns.rdata.Rdata._as_bytes(value, True) + + @classmethod + def emptiness(cls): + return Emptiness.ALLOWED + + @classmethod + def from_value(cls, value): + if value is None or len(value) == 0: + return None + else: + return cls(_unescape(value)) + + def to_text(self): + return '"' + dns.rdata._escapify(self.value) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + value = parser.get_bytes(parser.remaining()) + if len(value) == 0: + return None + else: + return cls(value) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(self.value) + + +@dns.immutable.immutable +class MandatoryParam(Param): + def __init__(self, keys): + # check for duplicates + keys = sorted([_validate_key(key)[0] for key in keys]) + prior_k = None + for k in keys: + if k == prior_k: + raise ValueError(f'duplicate key {k:d}') + prior_k = k + if k == ParamKey.MANDATORY: + raise ValueError('listed the mandatory key as mandatory') + self.keys = tuple(keys) + + @classmethod + def from_value(cls, value): + keys = [k.encode() for k in value.split(',')] + return cls(keys) + + def to_text(self): + return '"' + ','.join([key_to_text(key) for key in self.keys]) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + keys = [] + last_key = -1 + while parser.remaining() > 0: + key = parser.get_uint16() + if key < last_key: + raise dns.exception.FormError('manadatory keys not ascending') + last_key = key + keys.append(key) + return cls(keys) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for key in self.keys: + file.write(struct.pack('!H', key)) + + +@dns.immutable.immutable +class ALPNParam(Param): + def __init__(self, ids): + self.ids = dns.rdata.Rdata._as_tuple( + ids, lambda x: dns.rdata.Rdata._as_bytes(x, True, 255, False)) + + @classmethod + def 
from_value(cls, value): + return cls(_split(_unescape(value))) + + def to_text(self): + value = ','.join([_escapify(id) for id in self.ids]) + return '"' + dns.rdata._escapify(value.encode()) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + ids = [] + while parser.remaining() > 0: + id = parser.get_counted_bytes() + ids.append(id) + return cls(ids) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for id in self.ids: + file.write(struct.pack('!B', len(id))) + file.write(id) + + +@dns.immutable.immutable +class NoDefaultALPNParam(Param): + # We don't ever expect to instantiate this class, but we need + # a from_value() and a from_wire_parser(), so we just return None + # from the class methods when things are OK. + + @classmethod + def emptiness(cls): + return Emptiness.ALWAYS + + @classmethod + def from_value(cls, value): + if value is None or value == '': + return None + else: + raise ValueError('no-default-alpn with non-empty value') + + def to_text(self): + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + if parser.remaining() != 0: + raise dns.exception.FormError + return None + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + raise NotImplementedError # pragma: no cover + + +@dns.immutable.immutable +class PortParam(Param): + def __init__(self, port): + self.port = dns.rdata.Rdata._as_uint16(port) + + @classmethod + def from_value(cls, value): + value = int(value) + return cls(value) + + def to_text(self): + return f'"{self.port}"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + port = parser.get_uint16() + return cls(port) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(struct.pack('!H', self.port)) + + +@dns.immutable.immutable +class IPv4HintParam(Param): + def __init__(self, addresses): + self.addresses = dns.rdata.Rdata._as_tuple( + addresses, dns.rdata.Rdata._as_ipv4_address) + + @classmethod + def from_value(cls, value): + addresses = value.split(',') + return cls(addresses) + + def to_text(self): + return '"' + ','.join(self.addresses) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + addresses = [] + while parser.remaining() > 0: + ip = parser.get_bytes(4) + addresses.append(dns.ipv4.inet_ntoa(ip)) + return cls(addresses) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for address in self.addresses: + file.write(dns.ipv4.inet_aton(address)) + + +@dns.immutable.immutable +class IPv6HintParam(Param): + def __init__(self, addresses): + self.addresses = dns.rdata.Rdata._as_tuple( + addresses, dns.rdata.Rdata._as_ipv6_address) + + @classmethod + def from_value(cls, value): + addresses = value.split(',') + return cls(addresses) + + def to_text(self): + return '"' + ','.join(self.addresses) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + addresses = [] + while parser.remaining() > 0: + ip = parser.get_bytes(16) + addresses.append(dns.ipv6.inet_ntoa(ip)) + return cls(addresses) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for address in self.addresses: + file.write(dns.ipv6.inet_aton(address)) + + +@dns.immutable.immutable +class ECHParam(Param): + def __init__(self, ech): + self.ech = dns.rdata.Rdata._as_bytes(ech, True) + + @classmethod + def from_value(cls, value): + if '\\' in value: + raise 
ValueError('escape in ECH value') + value = base64.b64decode(value.encode()) + return cls(value) + + def to_text(self): + b64 = base64.b64encode(self.ech).decode('ascii') + return f'"{b64}"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + value = parser.get_bytes(parser.remaining()) + return cls(value) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(self.ech) + + +_class_for_key = { + ParamKey.MANDATORY: MandatoryParam, + ParamKey.ALPN: ALPNParam, + ParamKey.NO_DEFAULT_ALPN: NoDefaultALPNParam, + ParamKey.PORT: PortParam, + ParamKey.IPV4HINT: IPv4HintParam, + ParamKey.ECH: ECHParam, + ParamKey.IPV6HINT: IPv6HintParam, +} + + +def _validate_and_define(params, key, value): + (key, force_generic) = _validate_key(_unescape(key)) + if key in params: + raise SyntaxError(f'duplicate key "{key:d}"') + cls = _class_for_key.get(key, GenericParam) + emptiness = cls.emptiness() + if value is None: + if emptiness == Emptiness.NEVER: + raise SyntaxError('value cannot be empty') + value = cls.from_value(value) + else: + if force_generic: + value = cls.from_wire_parser(dns.wire.Parser(_unescape(value))) + else: + value = cls.from_value(value) + params[key] = value + + +@dns.immutable.immutable +class SVCBBase(dns.rdata.Rdata): + + """Base class for SVCB-like records""" + + # see: draft-ietf-dnsop-svcb-https-01 + + __slots__ = ['priority', 'target', 'params'] + + def __init__(self, rdclass, rdtype, priority, target, params): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.target = self._as_name(target) + for k, v in params.items(): + k = ParamKey.make(k) + if not isinstance(v, Param) and v is not None: + raise ValueError("not a Param") + self.params = dns.immutable.Dict(params) + # Make sure any parameter listed as mandatory is present in the + # record. + mandatory = params.get(ParamKey.MANDATORY) + if mandatory: + for key in mandatory.keys: + # Note we have to say "not in" as we have None as a value + # so a get() and a not None test would be wrong. + if key not in params: + raise ValueError(f'key {key:d} declared mandatory but not ' + 'present') + # The no-default-alpn parameter requires the alpn parameter. + if ParamKey.NO_DEFAULT_ALPN in params: + if ParamKey.ALPN not in params: + raise ValueError('no-default-alpn present, but alpn missing') + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + params = [] + for key in sorted(self.params.keys()): + value = self.params[key] + if value is None: + params.append(key_to_text(key)) + else: + kv = key_to_text(key) + '=' + value.to_text() + params.append(kv) + if len(params) > 0: + space = ' ' + else: + space = '' + return '%d %s%s%s' % (self.priority, target, space, ' '.join(params)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + priority = tok.get_uint16() + target = tok.get_name(origin, relativize, relativize_to) + if priority == 0: + token = tok.get() + if not token.is_eol_or_eof(): + raise SyntaxError('parameters in AliasMode') + tok.unget(token) + params = {} + while True: + token = tok.get() + if token.is_eol_or_eof(): + tok.unget(token) + break + if token.ttype != dns.tokenizer.IDENTIFIER: + raise SyntaxError('parameter is not an identifier') + equals = token.value.find('=') + if equals == len(token.value) - 1: + # 'key=', so next token should be a quoted string without + # any intervening whitespace. 
+ key = token.value[:-1] + token = tok.get(want_leading=True) + if token.ttype != dns.tokenizer.QUOTED_STRING: + raise SyntaxError('whitespace after =') + value = token.value + elif equals > 0: + # key=value + key = token.value[:equals] + value = token.value[equals + 1:] + elif equals == 0: + # =key + raise SyntaxError('parameter cannot start with "="') + else: + # key + key = token.value + value = None + _validate_and_define(params, key, value) + return cls(rdclass, rdtype, priority, target, params) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.priority)) + self.target.to_wire(file, None, origin, False) + for key in sorted(self.params): + file.write(struct.pack("!H", key)) + value = self.params[key] + # placeholder for length (or actual length of empty values) + file.write(struct.pack("!H", 0)) + if value is None: + continue + else: + start = file.tell() + value.to_wire(file, origin) + end = file.tell() + assert end - start < 65536 + file.seek(start - 2) + stuff = struct.pack("!H", end - start) + file.write(stuff) + file.seek(0, io.SEEK_END) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + priority = parser.get_uint16() + target = parser.get_name(origin) + if priority == 0 and parser.remaining() != 0: + raise dns.exception.FormError('parameters in AliasMode') + params = {} + prior_key = -1 + while parser.remaining() > 0: + key = parser.get_uint16() + if key < prior_key: + raise dns.exception.FormError('keys not in order') + prior_key = key + vlen = parser.get_uint16() + pcls = _class_for_key.get(key, GenericParam) + with parser.restrict_to(vlen): + value = pcls.from_wire_parser(parser, origin) + params[key] = value + return cls(rdclass, rdtype, priority, target, params) + + def _processing_priority(self): + return self.priority + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/tlsabase.py b/venv/lib/python3.10/site-packages/dns/rdtypes/tlsabase.py new file mode 100644 index 0000000..786fca5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/tlsabase.py @@ -0,0 +1,72 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
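SVCBBase.from_text above accepts bare key, key=value, and key= "quoted value" parameter forms, and its to_text always quotes values. A round-trip sketch with the HTTPS subclass:

    import dns.rdata
    import dns.rdataclass
    import dns.rdatatype

    https = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.HTTPS,
                                '1 . alpn=h2,h3 port=443')
    print(https.to_text())   # 1 . alpn="h2,h3" port="443"

Priority 0 is AliasMode, so '0 target. alpn=h2' is rejected with 'parameters in AliasMode', and a mandatory= key that names an absent parameter fails in __init__.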
+ +import struct +import binascii + +import dns.rdata +import dns.immutable +import dns.rdatatype + + +@dns.immutable.immutable +class TLSABase(dns.rdata.Rdata): + + """Base class for TLSA and SMIMEA records""" + + # see: RFC 6698 + + __slots__ = ['usage', 'selector', 'mtype', 'cert'] + + def __init__(self, rdclass, rdtype, usage, selector, + mtype, cert): + super().__init__(rdclass, rdtype) + self.usage = self._as_uint8(usage) + self.selector = self._as_uint8(selector) + self.mtype = self._as_uint8(mtype) + self.cert = self._as_bytes(cert) + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop('chunksize', 128) + return '%d %d %d %s' % (self.usage, + self.selector, + self.mtype, + dns.rdata._hexify(self.cert, + chunksize=chunksize, + **kw)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + usage = tok.get_uint8() + selector = tok.get_uint8() + mtype = tok.get_uint8() + cert = tok.concatenate_remaining_identifiers().encode() + cert = binascii.unhexlify(cert) + return cls(rdclass, rdtype, usage, selector, mtype, cert) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BBB", self.usage, self.selector, self.mtype) + file.write(header) + file.write(self.cert) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("BBB") + cert = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], cert) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/txtbase.py b/venv/lib/python3.10/site-packages/dns/rdtypes/txtbase.py new file mode 100644 index 0000000..68071ee --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/txtbase.py @@ -0,0 +1,87 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""TXT-like base class.""" + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class TXTBase(dns.rdata.Rdata): + + """Base class for rdata that is like a TXT record (see RFC 1035).""" + + __slots__ = ['strings'] + + def __init__(self, rdclass, rdtype, strings): + """Initialize a TXT-like rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. 
+ + *strings*, a tuple of ``bytes`` + """ + super().__init__(rdclass, rdtype) + self.strings = self._as_tuple(strings, + lambda x: self._as_bytes(x, True, 255)) + + def to_text(self, origin=None, relativize=True, **kw): + txt = '' + prefix = '' + for s in self.strings: + txt += '{}"{}"'.format(prefix, dns.rdata._escapify(s)) + prefix = ' ' + return txt + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, + relativize_to=None): + strings = [] + for token in tok.get_remaining(): + token = token.unescape_to_bytes() + # The 'if' below is always true in the current code, but we + # are leaving this check in in case things change some day. + if not (token.is_quoted_string() or + token.is_identifier()): # pragma: no cover + raise dns.exception.SyntaxError("expected a string") + if len(token.value) > 255: + raise dns.exception.SyntaxError("string too long") + strings.append(token.value) + if len(strings) == 0: + raise dns.exception.UnexpectedEnd + return cls(rdclass, rdtype, strings) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for s in self.strings: + l = len(s) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(s) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + strings = [] + while parser.remaining() > 0: + s = parser.get_counted_bytes() + strings.append(s) + return cls(rdclass, rdtype, strings) diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/txtbase.pyi b/venv/lib/python3.10/site-packages/dns/rdtypes/txtbase.pyi new file mode 100644 index 0000000..f8d5df9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/txtbase.pyi @@ -0,0 +1,12 @@ +import typing +from .. import rdata + +class TXTBase(rdata.Rdata): + strings: typing.Tuple[bytes, ...] + + def __init__(self, rdclass: int, rdtype: int, strings: typing.Iterable[bytes]) -> None: + ... + def to_text(self, origin: typing.Any, relativize: bool, **kw: typing.Any) -> str: + ... +class TXT(TXTBase): + ... diff --git a/venv/lib/python3.10/site-packages/dns/rdtypes/util.py b/venv/lib/python3.10/site-packages/dns/rdtypes/util.py new file mode 100644 index 0000000..9bf8f7e --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rdtypes/util.py @@ -0,0 +1,244 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
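TXTBase above caps each string at 255 bytes and stores them as a tuple of bytes; to_text re-quotes and escapes them. A sketch:

    import dns.rdata
    import dns.rdataclass
    import dns.rdatatype

    txt = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.TXT,
                              '"hello" "world"')
    print(txt.strings)    # (b'hello', b'world')
    print(txt.to_text())  # "hello" "world"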
+ +import collections +import random +import struct + +import dns.exception +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdata + + +class Gateway: + """A helper class for the IPSECKEY gateway and AMTRELAY relay fields""" + name = "" + + def __init__(self, type, gateway=None): + self.type = dns.rdata.Rdata._as_uint8(type) + self.gateway = gateway + self._check() + + @classmethod + def _invalid_type(cls, gateway_type): + return f"invalid {cls.name} type: {gateway_type}" + + def _check(self): + if self.type == 0: + if self.gateway not in (".", None): + raise SyntaxError(f"invalid {self.name} for type 0") + self.gateway = None + elif self.type == 1: + # check that it's OK + dns.ipv4.inet_aton(self.gateway) + elif self.type == 2: + # check that it's OK + dns.ipv6.inet_aton(self.gateway) + elif self.type == 3: + if not isinstance(self.gateway, dns.name.Name): + raise SyntaxError(f"invalid {self.name}; not a name") + else: + raise SyntaxError(self._invalid_type(self.type)) + + def to_text(self, origin=None, relativize=True): + if self.type == 0: + return "." + elif self.type in (1, 2): + return self.gateway + elif self.type == 3: + return str(self.gateway.choose_relativity(origin, relativize)) + else: + raise ValueError(self._invalid_type(self.type)) # pragma: no cover + + @classmethod + def from_text(cls, gateway_type, tok, origin=None, relativize=True, + relativize_to=None): + if gateway_type in (0, 1, 2): + gateway = tok.get_string() + elif gateway_type == 3: + gateway = tok.get_name(origin, relativize, relativize_to) + else: + raise dns.exception.SyntaxError( + cls._invalid_type(gateway_type)) # pragma: no cover + return cls(gateway_type, gateway) + + # pylint: disable=unused-argument + def to_wire(self, file, compress=None, origin=None, canonicalize=False): + if self.type == 0: + pass + elif self.type == 1: + file.write(dns.ipv4.inet_aton(self.gateway)) + elif self.type == 2: + file.write(dns.ipv6.inet_aton(self.gateway)) + elif self.type == 3: + self.gateway.to_wire(file, None, origin, False) + else: + raise ValueError(self._invalid_type(self.type)) # pragma: no cover + # pylint: enable=unused-argument + + @classmethod + def from_wire_parser(cls, gateway_type, parser, origin=None): + if gateway_type == 0: + gateway = None + elif gateway_type == 1: + gateway = dns.ipv4.inet_ntoa(parser.get_bytes(4)) + elif gateway_type == 2: + gateway = dns.ipv6.inet_ntoa(parser.get_bytes(16)) + elif gateway_type == 3: + gateway = parser.get_name(origin) + else: + raise dns.exception.FormError(cls._invalid_type(gateway_type)) + return cls(gateway_type, gateway) + + +class Bitmap: + """A helper class for the NSEC/NSEC3/CSYNC type bitmaps""" + type_name = "" + + def __init__(self, windows=None): + last_window = -1 + self.windows = windows + for (window, bitmap) in self.windows: + if not isinstance(window, int): + raise ValueError(f"bad {self.type_name} window type") + if window <= last_window: + raise ValueError(f"bad {self.type_name} window order") + if window > 256: + raise ValueError(f"bad {self.type_name} window number") + last_window = window + if not isinstance(bitmap, bytes): + raise ValueError(f"bad {self.type_name} octets type") + if len(bitmap) == 0 or len(bitmap) > 32: + raise ValueError(f"bad {self.type_name} octets") + + def to_text(self): + text = "" + for (window, bitmap) in self.windows: + bits = [] + for (i, byte) in enumerate(bitmap): + for j in range(0, 8): + if byte & (0x80 >> j): + rdtype = window * 256 + i * 8 + j + bits.append(dns.rdatatype.to_text(rdtype)) + text += (' 
' + ' '.join(bits)) + return text + + @classmethod + def from_text(cls, tok): + rdtypes = [] + for token in tok.get_remaining(): + rdtype = dns.rdatatype.from_text(token.unescape().value) + if rdtype == 0: + raise dns.exception.SyntaxError(f"{cls.type_name} with bit 0") + rdtypes.append(rdtype) + rdtypes.sort() + window = 0 + octets = 0 + prior_rdtype = 0 + bitmap = bytearray(b'\0' * 32) + windows = [] + for rdtype in rdtypes: + if rdtype == prior_rdtype: + continue + prior_rdtype = rdtype + new_window = rdtype // 256 + if new_window != window: + if octets != 0: + windows.append((window, bytes(bitmap[0:octets]))) + bitmap = bytearray(b'\0' * 32) + window = new_window + offset = rdtype % 256 + byte = offset // 8 + bit = offset % 8 + octets = byte + 1 + bitmap[byte] = bitmap[byte] | (0x80 >> bit) + if octets != 0: + windows.append((window, bytes(bitmap[0:octets]))) + return cls(windows) + + def to_wire(self, file): + for (window, bitmap) in self.windows: + file.write(struct.pack('!BB', window, len(bitmap))) + file.write(bitmap) + + @classmethod + def from_wire_parser(cls, parser): + windows = [] + while parser.remaining() > 0: + window = parser.get_uint8() + bitmap = parser.get_counted_bytes() + windows.append((window, bitmap)) + return cls(windows) + + +def _priority_table(items): + by_priority = collections.defaultdict(list) + for rdata in items: + by_priority[rdata._processing_priority()].append(rdata) + return by_priority + +def priority_processing_order(iterable): + items = list(iterable) + if len(items) == 1: + return items + by_priority = _priority_table(items) + ordered = [] + for k in sorted(by_priority.keys()): + rdatas = by_priority[k] + random.shuffle(rdatas) + ordered.extend(rdatas) + return ordered + +_no_weight = 0.1 + +def weighted_processing_order(iterable): + items = list(iterable) + if len(items) == 1: + return items + by_priority = _priority_table(items) + ordered = [] + for k in sorted(by_priority.keys()): + rdatas = by_priority[k] + total = sum(rdata._processing_weight() or _no_weight + for rdata in rdatas) + while len(rdatas) > 1: + r = random.uniform(0, total) + for (n, rdata) in enumerate(rdatas): + weight = rdata._processing_weight() or _no_weight + if weight > r: + break + r -= weight + total -= weight + ordered.append(rdata) # pylint: disable=undefined-loop-variable + del rdatas[n] # pylint: disable=undefined-loop-variable + ordered.append(rdatas[0]) + return ordered + +def parse_formatted_hex(formatted, num_chunks, chunk_size, separator): + if len(formatted) != num_chunks * (chunk_size + 1) - 1: + raise ValueError('invalid formatted hex string') + value = b'' + for _ in range(num_chunks): + chunk = formatted[0:chunk_size] + value += int(chunk, 16).to_bytes(chunk_size // 2, 'big') + formatted = formatted[chunk_size:] + if len(formatted) > 0 and formatted[0] != separator: + raise ValueError('invalid formatted hex string') + formatted = formatted[1:] + return value diff --git a/venv/lib/python3.10/site-packages/dns/renderer.py b/venv/lib/python3.10/site-packages/dns/renderer.py new file mode 100644 index 0000000..4e4391c --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/renderer.py @@ -0,0 +1,250 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Help for building DNS wire format messages""" + +import contextlib +import io +import struct +import random +import time + +import dns.exception +import dns.tsig + + +QUESTION = 0 +ANSWER = 1 +AUTHORITY = 2 +ADDITIONAL = 3 + + +class Renderer: + """Helper class for building DNS wire-format messages. + + Most applications can use the higher-level L{dns.message.Message} + class and its to_wire() method to generate wire-format messages. + This class is for those applications which need finer control + over the generation of messages. + + Typical use:: + + r = dns.renderer.Renderer(id=1, flags=0x80, max_size=512) + r.add_question(qname, qtype, qclass) + r.add_rrset(dns.renderer.ANSWER, rrset_1) + r.add_rrset(dns.renderer.ANSWER, rrset_2) + r.add_rrset(dns.renderer.AUTHORITY, ns_rrset) + r.add_edns(0, 0, 4096) + r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_1) + r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_2) + r.write_header() + r.add_tsig(keyname, secret, 300, 1, 0, '', request_mac) + wire = r.get_wire() + + output, an io.BytesIO, where rendering is written + + id: the message id + + flags: the message flags + + max_size: the maximum size of the message + + origin: the origin to use when rendering relative names + + compress: the compression table + + section: an int, the section currently being rendered + + counts: list of the number of RRs in each section + + mac: the MAC of the rendered message (if TSIG was used) + """ + + def __init__(self, id=None, flags=0, max_size=65535, origin=None): + """Initialize a new renderer.""" + + self.output = io.BytesIO() + if id is None: + self.id = random.randint(0, 65535) + else: + self.id = id + self.flags = flags + self.max_size = max_size + self.origin = origin + self.compress = {} + self.section = QUESTION + self.counts = [0, 0, 0, 0] + self.output.write(b'\x00' * 12) + self.mac = '' + + def _rollback(self, where): + """Truncate the output buffer at offset *where*, and remove any + compression table entries that pointed beyond the truncation + point. + """ + + self.output.seek(where) + self.output.truncate() + keys_to_delete = [] + for k, v in self.compress.items(): + if v >= where: + keys_to_delete.append(k) + for k in keys_to_delete: + del self.compress[k] + + def _set_section(self, section): + """Set the renderer's current section. + + Sections must be rendered order: QUESTION, ANSWER, AUTHORITY, + ADDITIONAL. Sections may be empty. + + Raises dns.exception.FormError if an attempt was made to set + a section value less than the current section. 
+ """ + + if self.section != section: + if self.section > section: + raise dns.exception.FormError + self.section = section + + @contextlib.contextmanager + def _track_size(self): + start = self.output.tell() + yield start + if self.output.tell() > self.max_size: + self._rollback(start) + raise dns.exception.TooBig + + def add_question(self, qname, rdtype, rdclass=dns.rdataclass.IN): + """Add a question to the message.""" + + self._set_section(QUESTION) + with self._track_size(): + qname.to_wire(self.output, self.compress, self.origin) + self.output.write(struct.pack("!HH", rdtype, rdclass)) + self.counts[QUESTION] += 1 + + def add_rrset(self, section, rrset, **kw): + """Add the rrset to the specified section. + + Any keyword arguments are passed on to the rdataset's to_wire() + routine. + """ + + self._set_section(section) + with self._track_size(): + n = rrset.to_wire(self.output, self.compress, self.origin, **kw) + self.counts[section] += n + + def add_rdataset(self, section, name, rdataset, **kw): + """Add the rdataset to the specified section, using the specified + name as the owner name. + + Any keyword arguments are passed on to the rdataset's to_wire() + routine. + """ + + self._set_section(section) + with self._track_size(): + n = rdataset.to_wire(name, self.output, self.compress, self.origin, + **kw) + self.counts[section] += n + + def add_edns(self, edns, ednsflags, payload, options=None): + """Add an EDNS OPT record to the message.""" + + # make sure the EDNS version in ednsflags agrees with edns + ednsflags &= 0xFF00FFFF + ednsflags |= (edns << 16) + opt = dns.message.Message._make_opt(ednsflags, payload, options) + self.add_rrset(ADDITIONAL, opt) + + def add_tsig(self, keyname, secret, fudge, id, tsig_error, other_data, + request_mac, algorithm=dns.tsig.default_algorithm): + """Add a TSIG signature to the message.""" + + s = self.output.getvalue() + + if isinstance(secret, dns.tsig.Key): + key = secret + else: + key = dns.tsig.Key(keyname, secret, algorithm) + tsig = dns.message.Message._make_tsig(keyname, algorithm, 0, fudge, + b'', id, tsig_error, other_data) + (tsig, _) = dns.tsig.sign(s, key, tsig[0], int(time.time()), + request_mac) + self._write_tsig(tsig, keyname) + + def add_multi_tsig(self, ctx, keyname, secret, fudge, id, tsig_error, + other_data, request_mac, + algorithm=dns.tsig.default_algorithm): + """Add a TSIG signature to the message. Unlike add_tsig(), this can be + used for a series of consecutive DNS envelopes, e.g. for a zone + transfer over TCP [RFC2845, 4.4]. + + For the first message in the sequence, give ctx=None. 
For each + subsequent message, give the ctx that was returned from the + add_multi_tsig() call for the previous message.""" + + s = self.output.getvalue() + + if isinstance(secret, dns.tsig.Key): + key = secret + else: + key = dns.tsig.Key(keyname, secret, algorithm) + tsig = dns.message.Message._make_tsig(keyname, algorithm, 0, fudge, + b'', id, tsig_error, other_data) + (tsig, ctx) = dns.tsig.sign(s, key, tsig[0], int(time.time()), + request_mac, ctx, True) + self._write_tsig(tsig, keyname) + return ctx + + def _write_tsig(self, tsig, keyname): + self._set_section(ADDITIONAL) + with self._track_size(): + keyname.to_wire(self.output, self.compress, self.origin) + self.output.write(struct.pack('!HHIH', dns.rdatatype.TSIG, + dns.rdataclass.ANY, 0, 0)) + rdata_start = self.output.tell() + tsig.to_wire(self.output) + + after = self.output.tell() + self.output.seek(rdata_start - 2) + self.output.write(struct.pack('!H', after - rdata_start)) + self.counts[ADDITIONAL] += 1 + self.output.seek(10) + self.output.write(struct.pack('!H', self.counts[ADDITIONAL])) + self.output.seek(0, io.SEEK_END) + + def write_header(self): + """Write the DNS message header. + + Writing the DNS message header is done after all sections + have been rendered, but before the optional TSIG signature + is added. + """ + + self.output.seek(0) + self.output.write(struct.pack('!HHHHHH', self.id, self.flags, + self.counts[0], self.counts[1], + self.counts[2], self.counts[3])) + self.output.seek(0, io.SEEK_END) + + def get_wire(self): + """Return the wire format message.""" + + return self.output.getvalue() diff --git a/venv/lib/python3.10/site-packages/dns/resolver.py b/venv/lib/python3.10/site-packages/dns/resolver.py new file mode 100644 index 0000000..7da7a61 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/resolver.py @@ -0,0 +1,1575 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
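Before the stub resolver below, the Renderer added above can be exercised on its own, along the lines of its docstring; a minimal question-only message as a sketch:

    import dns.name
    import dns.rdataclass
    import dns.rdatatype
    import dns.renderer

    r = dns.renderer.Renderer(id=1, flags=0x0100, max_size=512)  # RD bit set
    r.add_question(dns.name.from_text('example.com.'), dns.rdatatype.A,
                   dns.rdataclass.IN)
    r.write_header()
    wire = r.get_wire()
    print(len(wire))   # 29: 12-byte header + 13-byte qname + 4-byte type/class

write_header has to come after all sections are rendered, since it copies the per-section counts into the first 12 bytes of the buffer.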
+ +"""DNS stub resolver.""" +from urllib.parse import urlparse +import contextlib +import socket +import sys +import time +import random +import warnings +try: + import threading as _threading +except ImportError: # pragma: no cover + import dummy_threading as _threading # type: ignore + +import dns.exception +import dns.flags +import dns.inet +import dns.ipv4 +import dns.ipv6 +import dns.message +import dns.name +import dns.query +import dns.rcode +import dns.rdataclass +import dns.rdatatype +import dns.reversename +import dns.tsig + +if sys.platform == 'win32': + import dns.win32util + +class NXDOMAIN(dns.exception.DNSException): + """The DNS query name does not exist.""" + supp_kwargs = {'qnames', 'responses'} + fmt = None # we have our own __str__ implementation + + # pylint: disable=arguments-differ + + def _check_kwargs(self, qnames, + responses=None): + if not isinstance(qnames, (list, tuple, set)): + raise AttributeError("qnames must be a list, tuple or set") + if len(qnames) == 0: + raise AttributeError("qnames must contain at least one element") + if responses is None: + responses = {} + elif not isinstance(responses, dict): + raise AttributeError("responses must be a dict(qname=response)") + kwargs = dict(qnames=qnames, responses=responses) + return kwargs + + def __str__(self): + if 'qnames' not in self.kwargs: + return super().__str__() + qnames = self.kwargs['qnames'] + if len(qnames) > 1: + msg = 'None of DNS query names exist' + else: + msg = 'The DNS query name does not exist' + qnames = ', '.join(map(str, qnames)) + return "{}: {}".format(msg, qnames) + + @property + def canonical_name(self): + """Return the unresolved canonical name.""" + if 'qnames' not in self.kwargs: + raise TypeError("parametrized exception required") + for qname in self.kwargs['qnames']: + response = self.kwargs['responses'][qname] + try: + cname = response.canonical_name() + if cname != qname: + return cname + except Exception: + # We can just eat this exception as it means there was + # something wrong with the response. + pass + return self.kwargs['qnames'][0] + + def __add__(self, e_nx): + """Augment by results from another NXDOMAIN exception.""" + qnames0 = list(self.kwargs.get('qnames', [])) + responses0 = dict(self.kwargs.get('responses', {})) + responses1 = e_nx.kwargs.get('responses', {}) + for qname1 in e_nx.kwargs.get('qnames', []): + if qname1 not in qnames0: + qnames0.append(qname1) + if qname1 in responses1: + responses0[qname1] = responses1[qname1] + return NXDOMAIN(qnames=qnames0, responses=responses0) + + def qnames(self): + """All of the names that were tried. + + Returns a list of ``dns.name.Name``. + """ + return self.kwargs['qnames'] + + def responses(self): + """A map from queried names to their NXDOMAIN responses. + + Returns a dict mapping a ``dns.name.Name`` to a + ``dns.message.Message``. + """ + return self.kwargs['responses'] + + def response(self, qname): + """The response for query *qname*. + + Returns a ``dns.message.Message``. 
+ """ + return self.kwargs['responses'][qname] + + +class YXDOMAIN(dns.exception.DNSException): + """The DNS query name is too long after DNAME substitution.""" + + +def _errors_to_text(errors): + """Turn a resolution errors trace into a list of text.""" + texts = [] + for err in errors: + texts.append('Server {} {} port {} answered {}'.format(err[0], + 'TCP' if err[1] else 'UDP', err[2], err[3])) + return texts + + +class LifetimeTimeout(dns.exception.Timeout): + """The resolution lifetime expired.""" + + msg = "The resolution lifetime expired." + fmt = "%s after {timeout:.3f} seconds: {errors}" % msg[:-1] + supp_kwargs = {'timeout', 'errors'} + + def _fmt_kwargs(self, **kwargs): + srv_msgs = _errors_to_text(kwargs['errors']) + return super()._fmt_kwargs(timeout=kwargs['timeout'], + errors='; '.join(srv_msgs)) + + +# We added more detail to resolution timeouts, but they are still +# subclasses of dns.exception.Timeout for backwards compatibility. We also +# keep dns.resolver.Timeout defined for backwards compatibility. +Timeout = LifetimeTimeout + + +class NoAnswer(dns.exception.DNSException): + """The DNS response does not contain an answer to the question.""" + fmt = 'The DNS response does not contain an answer ' + \ + 'to the question: {query}' + supp_kwargs = {'response'} + + def _fmt_kwargs(self, **kwargs): + return super()._fmt_kwargs(query=kwargs['response'].question) + + def response(self): + return self.kwargs['response'] + + +class NoNameservers(dns.exception.DNSException): + """All nameservers failed to answer the query. + + errors: list of servers and respective errors + The type of errors is + [(server IP address, any object convertible to string)]. + Non-empty errors list will add explanatory message () + """ + + msg = "All nameservers failed to answer the query." + fmt = "%s {query}: {errors}" % msg[:-1] + supp_kwargs = {'request', 'errors'} + + def _fmt_kwargs(self, **kwargs): + srv_msgs = _errors_to_text(kwargs['errors']) + return super()._fmt_kwargs(query=kwargs['request'].question, + errors='; '.join(srv_msgs)) + + +class NotAbsolute(dns.exception.DNSException): + """An absolute domain name is required but a relative name was provided.""" + + +class NoRootSOA(dns.exception.DNSException): + """There is no SOA RR at the DNS root name. This should never happen!""" + + +class NoMetaqueries(dns.exception.DNSException): + """DNS metaqueries are not allowed.""" + +class NoResolverConfiguration(dns.exception.DNSException): + """Resolver configuration could not be read or specified no nameservers.""" + +class Answer: + """DNS stub resolver answer. + + Instances of this class bundle up the result of a successful DNS + resolution. + + For convenience, the answer object implements much of the sequence + protocol, forwarding to its ``rrset`` attribute. E.g. + ``for a in answer`` is equivalent to ``for a in answer.rrset``. + ``answer[i]`` is equivalent to ``answer.rrset[i]``, and + ``answer[i:j]`` is equivalent to ``answer.rrset[i:j]``. + + Note that CNAMEs or DNAMEs in the response may mean that answer + RRset's name might not be the query name. + """ + + def __init__(self, qname, rdtype, rdclass, response, nameserver=None, + port=None): + self.qname = qname + self.rdtype = rdtype + self.rdclass = rdclass + self.response = response + self.nameserver = nameserver + self.port = port + self.chaining_result = response.resolve_chaining() + # Copy some attributes out of chaining_result for backwards + # compatibility and convenience. 
+ self.canonical_name = self.chaining_result.canonical_name + self.rrset = self.chaining_result.answer + self.expiration = time.time() + self.chaining_result.minimum_ttl + + def __getattr__(self, attr): # pragma: no cover + if attr == 'name': + return self.rrset.name + elif attr == 'ttl': + return self.rrset.ttl + elif attr == 'covers': + return self.rrset.covers + elif attr == 'rdclass': + return self.rrset.rdclass + elif attr == 'rdtype': + return self.rrset.rdtype + else: + raise AttributeError(attr) + + def __len__(self): + return self.rrset and len(self.rrset) or 0 + + def __iter__(self): + return self.rrset and iter(self.rrset) or iter(tuple()) + + def __getitem__(self, i): + if self.rrset is None: + raise IndexError + return self.rrset[i] + + def __delitem__(self, i): + if self.rrset is None: + raise IndexError + del self.rrset[i] + + +class CacheStatistics: + """Cache Statistics + """ + + def __init__(self, hits=0, misses=0): + self.hits = hits + self.misses = misses + + def reset(self): + self.hits = 0 + self.misses = 0 + + def clone(self): + return CacheStatistics(self.hits, self.misses) + + +class CacheBase: + def __init__(self): + self.lock = _threading.Lock() + self.statistics = CacheStatistics() + + def reset_statistics(self): + """Reset all statistics to zero.""" + with self.lock: + self.statistics.reset() + + def hits(self): + """How many hits has the cache had?""" + with self.lock: + return self.statistics.hits + + def misses(self): + """How many misses has the cache had?""" + with self.lock: + return self.statistics.misses + + def get_statistics_snapshot(self): + """Return a consistent snapshot of all the statistics. + + If running with multiple threads, it's better to take a + snapshot than to call statistics methods such as hits() and + misses() individually. + """ + with self.lock: + return self.statistics.clone() + + +class Cache(CacheBase): + """Simple thread-safe DNS answer cache.""" + + def __init__(self, cleaning_interval=300.0): + """*cleaning_interval*, a ``float`` is the number of seconds between + periodic cleanings. + """ + + super().__init__() + self.data = {} + self.cleaning_interval = cleaning_interval + self.next_cleaning = time.time() + self.cleaning_interval + + def _maybe_clean(self): + """Clean the cache if it's time to do so.""" + + now = time.time() + if self.next_cleaning <= now: + keys_to_delete = [] + for (k, v) in self.data.items(): + if v.expiration <= now: + keys_to_delete.append(k) + for k in keys_to_delete: + del self.data[k] + now = time.time() + self.next_cleaning = now + self.cleaning_interval + + def get(self, key): + """Get the answer associated with *key*. + + Returns None if no answer is cached for the key. + + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. + + Returns a ``dns.resolver.Answer`` or ``None``. + """ + + with self.lock: + self._maybe_clean() + v = self.data.get(key) + if v is None or v.expiration <= time.time(): + self.statistics.misses += 1 + return None + self.statistics.hits += 1 + return v + + def put(self, key, value): + """Associate key and value in the cache. + + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. + + *value*, a ``dns.resolver.Answer``, the answer. + """ + + with self.lock: + self._maybe_clean() + self.data[key] = value + + def flush(self, key=None): + """Flush the cache. + + If *key* is not ``None``, only that item is flushed. Otherwise + the entire cache is flushed. 
+ + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. + """ + + with self.lock: + if key is not None: + if key in self.data: + del self.data[key] + else: + self.data = {} + self.next_cleaning = time.time() + self.cleaning_interval + + +class LRUCacheNode: + """LRUCache node.""" + + def __init__(self, key, value): + self.key = key + self.value = value + self.hits = 0 + self.prev = self + self.next = self + + def link_after(self, node): + self.prev = node + self.next = node.next + node.next.prev = self + node.next = self + + def unlink(self): + self.next.prev = self.prev + self.prev.next = self.next + + +class LRUCache(CacheBase): + """Thread-safe, bounded, least-recently-used DNS answer cache. + + This cache is better than the simple cache (above) if you're + running a web crawler or other process that does a lot of + resolutions. The LRUCache has a maximum number of nodes, and when + it is full, the least-recently used node is removed to make space + for a new one. + """ + + def __init__(self, max_size=100000): + """*max_size*, an ``int``, is the maximum number of nodes to cache; + it must be greater than 0. + """ + + super().__init__() + self.data = {} + self.set_max_size(max_size) + self.sentinel = LRUCacheNode(None, None) + self.sentinel.prev = self.sentinel + self.sentinel.next = self.sentinel + + def set_max_size(self, max_size): + if max_size < 1: + max_size = 1 + self.max_size = max_size + + def get(self, key): + """Get the answer associated with *key*. + + Returns None if no answer is cached for the key. + + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. + + Returns a ``dns.resolver.Answer`` or ``None``. + """ + + with self.lock: + node = self.data.get(key) + if node is None: + self.statistics.misses += 1 + return None + # Unlink because we're either going to move the node to the front + # of the LRU list or we're going to free it. + node.unlink() + if node.value.expiration <= time.time(): + del self.data[node.key] + self.statistics.misses += 1 + return None + node.link_after(self.sentinel) + self.statistics.hits += 1 + node.hits += 1 + return node.value + + def get_hits_for_key(self, key): + """Return the number of cache hits associated with the specified key.""" + with self.lock: + node = self.data.get(key) + if node is None or node.value.expiration <= time.time(): + return 0 + else: + return node.hits + + def put(self, key, value): + """Associate key and value in the cache. + + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. + + *value*, a ``dns.resolver.Answer``, the answer. + """ + + with self.lock: + node = self.data.get(key) + if node is not None: + node.unlink() + del self.data[node.key] + while len(self.data) >= self.max_size: + node = self.sentinel.prev + node.unlink() + del self.data[node.key] + node = LRUCacheNode(key, value) + node.link_after(self.sentinel) + self.data[key] = node + + def flush(self, key=None): + """Flush the cache. + + If *key* is not ``None``, only that item is flushed. Otherwise + the entire cache is flushed. + + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. 
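+
+        Example (a sketch)::
+
+            cache = LRUCache(max_size=50000)
+            resolver = dns.resolver.Resolver()
+            resolver.cache = cache
+            cache.flush()    # discard every cached answer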
+ """ + + with self.lock: + if key is not None: + node = self.data.get(key) + if node is not None: + node.unlink() + del self.data[node.key] + else: + node = self.sentinel.next + while node != self.sentinel: + next = node.next + node.unlink() + node = next + self.data = {} + +class _Resolution: + """Helper class for dns.resolver.Resolver.resolve(). + + All of the "business logic" of resolution is encapsulated in this + class, allowing us to have multiple resolve() implementations + using different I/O schemes without copying all of the + complicated logic. + + This class is a "friend" to dns.resolver.Resolver and manipulates + resolver data structures directly. + """ + + def __init__(self, resolver, qname, rdtype, rdclass, tcp, + raise_on_no_answer, search): + if isinstance(qname, str): + qname = dns.name.from_text(qname, None) + rdtype = dns.rdatatype.RdataType.make(rdtype) + if dns.rdatatype.is_metatype(rdtype): + raise NoMetaqueries + rdclass = dns.rdataclass.RdataClass.make(rdclass) + if dns.rdataclass.is_metaclass(rdclass): + raise NoMetaqueries + self.resolver = resolver + self.qnames_to_try = resolver._get_qnames_to_try(qname, search) + self.qnames = self.qnames_to_try[:] + self.rdtype = rdtype + self.rdclass = rdclass + self.tcp = tcp + self.raise_on_no_answer = raise_on_no_answer + self.nxdomain_responses = {} + # + # Initialize other things to help analysis tools + self.qname = dns.name.empty + self.nameservers = [] + self.current_nameservers = [] + self.errors = [] + self.nameserver = None + self.port = 0 + self.tcp_attempt = False + self.retry_with_tcp = False + self.request = None + self.backoff = 0 + + def next_request(self): + """Get the next request to send, and check the cache. + + Returns a (request, answer) tuple. At most one of request or + answer will not be None. + """ + + # We return a tuple instead of Union[Message,Answer] as it lets + # the caller avoid isinstance(). + + while len(self.qnames) > 0: + self.qname = self.qnames.pop(0) + + # Do we know the answer? + if self.resolver.cache: + answer = self.resolver.cache.get((self.qname, self.rdtype, + self.rdclass)) + if answer is not None: + if answer.rrset is None and self.raise_on_no_answer: + raise NoAnswer(response=answer.response) + else: + return (None, answer) + answer = self.resolver.cache.get((self.qname, + dns.rdatatype.ANY, + self.rdclass)) + if answer is not None and \ + answer.response.rcode() == dns.rcode.NXDOMAIN: + # cached NXDOMAIN; record it and continue to next + # name. + self.nxdomain_responses[self.qname] = answer.response + continue + + # Build the request + request = dns.message.make_query(self.qname, self.rdtype, + self.rdclass) + if self.resolver.keyname is not None: + request.use_tsig(self.resolver.keyring, self.resolver.keyname, + algorithm=self.resolver.keyalgorithm) + request.use_edns(self.resolver.edns, self.resolver.ednsflags, + self.resolver.payload) + if self.resolver.flags is not None: + request.flags = self.resolver.flags + + self.nameservers = self.resolver.nameservers[:] + if self.resolver.rotate: + random.shuffle(self.nameservers) + self.current_nameservers = self.nameservers[:] + self.errors = [] + self.nameserver = None + self.tcp_attempt = False + self.retry_with_tcp = False + self.request = request + self.backoff = 0.10 + + return (request, None) + + # + # We've tried everything and only gotten NXDOMAINs. (We know + # it's only NXDOMAINs as anything else would have returned + # before now.) 
+ # + raise NXDOMAIN(qnames=self.qnames_to_try, + responses=self.nxdomain_responses) + + def next_nameserver(self): + if self.retry_with_tcp: + assert self.nameserver is not None + self.tcp_attempt = True + self.retry_with_tcp = False + return (self.nameserver, self.port, True, 0) + + backoff = 0 + if not self.current_nameservers: + if len(self.nameservers) == 0: + # Out of things to try! + raise NoNameservers(request=self.request, errors=self.errors) + self.current_nameservers = self.nameservers[:] + backoff = self.backoff + self.backoff = min(self.backoff * 2, 2) + + self.nameserver = self.current_nameservers.pop(0) + self.port = self.resolver.nameserver_ports.get(self.nameserver, + self.resolver.port) + self.tcp_attempt = self.tcp + return (self.nameserver, self.port, self.tcp_attempt, backoff) + + def query_result(self, response, ex): + # + # returns an (answer: Answer, end_loop: bool) tuple. + # + if ex: + # Exception during I/O or from_wire() + assert response is None + self.errors.append((self.nameserver, self.tcp_attempt, self.port, + ex, response)) + if isinstance(ex, dns.exception.FormError) or \ + isinstance(ex, EOFError) or \ + isinstance(ex, OSError) or \ + isinstance(ex, NotImplementedError): + # This nameserver is no good, take it out of the mix. + self.nameservers.remove(self.nameserver) + elif isinstance(ex, dns.message.Truncated): + if self.tcp_attempt: + # Truncation with TCP is no good! + self.nameservers.remove(self.nameserver) + else: + self.retry_with_tcp = True + return (None, False) + # We got an answer! + assert response is not None + rcode = response.rcode() + if rcode == dns.rcode.NOERROR: + try: + answer = Answer(self.qname, self.rdtype, self.rdclass, response, + self.nameserver, self.port) + except Exception as e: + self.errors.append((self.nameserver, self.tcp_attempt, + self.port, e, response)) + # The nameserver is no good, take it out of the mix. + self.nameservers.remove(self.nameserver) + return (None, False) + if self.resolver.cache: + self.resolver.cache.put((self.qname, self.rdtype, + self.rdclass), answer) + if answer.rrset is None and self.raise_on_no_answer: + raise NoAnswer(response=answer.response) + return (answer, True) + elif rcode == dns.rcode.NXDOMAIN: + # Further validate the response by making an Answer, even + # if we aren't going to cache it. + try: + answer = Answer(self.qname, dns.rdatatype.ANY, + dns.rdataclass.IN, response) + except Exception as e: + self.errors.append((self.nameserver, self.tcp_attempt, + self.port, e, response)) + # The nameserver is no good, take it out of the mix. + self.nameservers.remove(self.nameserver) + return (None, False) + self.nxdomain_responses[self.qname] = response + if self.resolver.cache: + self.resolver.cache.put((self.qname, + dns.rdatatype.ANY, + self.rdclass), answer) + # Make next_nameserver() return None, so caller breaks its + # inner loop and calls next_request(). + return (None, True) + elif rcode == dns.rcode.YXDOMAIN: + yex = YXDOMAIN() + self.errors.append((self.nameserver, self.tcp_attempt, + self.port, yex, response)) + raise yex + else: + # + # We got a response, but we're not happy with the + # rcode in it. 
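+            # A SERVFAIL'ing server stays in the rotation only when
+            # retry_servfail is enabled on the resolver; any other unhappy
+            # rcode drops the server for the rest of this resolution, and
+            # the rcode is recorded in self.errors either way.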
+ # + if rcode != dns.rcode.SERVFAIL or not self.resolver.retry_servfail: + self.nameservers.remove(self.nameserver) + self.errors.append((self.nameserver, self.tcp_attempt, self.port, + dns.rcode.to_text(rcode), response)) + return (None, False) + +class BaseResolver: + """DNS stub resolver.""" + + # We initialize in reset() + # + # pylint: disable=attribute-defined-outside-init + + def __init__(self, filename='/etc/resolv.conf', configure=True): + """*filename*, a ``str`` or file object, specifying a file + in standard /etc/resolv.conf format. This parameter is meaningful + only when *configure* is true and the platform is POSIX. + + *configure*, a ``bool``. If True (the default), the resolver + instance is configured in the normal fashion for the operating + system the resolver is running on. (I.e. by reading a + /etc/resolv.conf file on POSIX systems and from the registry + on Windows systems.) + """ + + self.reset() + if configure: + if sys.platform == 'win32': + self.read_registry() + elif filename: + self.read_resolv_conf(filename) + + def reset(self): + """Reset all resolver configuration to the defaults.""" + + self.domain = \ + dns.name.Name(dns.name.from_text(socket.gethostname())[1:]) + if len(self.domain) == 0: + self.domain = dns.name.root + self.nameservers = [] + self.nameserver_ports = {} + self.port = 53 + self.search = [] + self.use_search_by_default = False + self.timeout = 2.0 + self.lifetime = 5.0 + self.keyring = None + self.keyname = None + self.keyalgorithm = dns.tsig.default_algorithm + self.edns = -1 + self.ednsflags = 0 + self.payload = 0 + self.cache = None + self.flags = None + self.retry_servfail = False + self.rotate = False + self.ndots = None + + def read_resolv_conf(self, f): + """Process *f* as a file in the /etc/resolv.conf format. If f is + a ``str``, it is used as the name of the file to open; otherwise it + is treated as the file itself. + + Interprets the following items: + + - nameserver - name server IP address + + - domain - local domain name + + - search - search list for host-name lookup + + - options - supported options are rotate, timeout, edns0, and ndots + + """ + + with contextlib.ExitStack() as stack: + if isinstance(f, str): + try: + f = stack.enter_context(open(f)) + except OSError: + # /etc/resolv.conf doesn't exist, can't be read, etc. 
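+                    # (When the file is readable it is parsed line by
+                    # line below; a typical accepted file, with sample
+                    # documentation-range values, looks like:
+                    #     nameserver 192.0.2.1
+                    #     search corp.example
+                    #     options rotate ndots:2
+                    # )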
+ raise NoResolverConfiguration(f'cannot open {f}') + + for l in f: + if len(l) == 0 or l[0] == '#' or l[0] == ';': + continue + tokens = l.split() + + # Any line containing less than 2 tokens is malformed + if len(tokens) < 2: + continue + + if tokens[0] == 'nameserver': + self.nameservers.append(tokens[1]) + elif tokens[0] == 'domain': + self.domain = dns.name.from_text(tokens[1]) + # domain and search are exclusive + self.search = [] + elif tokens[0] == 'search': + # the last search wins + self.search = [] + for suffix in tokens[1:]: + self.search.append(dns.name.from_text(suffix)) + # We don't set domain as it is not used if + # len(self.search) > 0 + elif tokens[0] == 'options': + for opt in tokens[1:]: + if opt == 'rotate': + self.rotate = True + elif opt == 'edns0': + self.use_edns() + elif 'timeout' in opt: + try: + self.timeout = int(opt.split(':')[1]) + except (ValueError, IndexError): + pass + elif 'ndots' in opt: + try: + self.ndots = int(opt.split(':')[1]) + except (ValueError, IndexError): + pass + if len(self.nameservers) == 0: + raise NoResolverConfiguration('no nameservers') + + def read_registry(self): + """Extract resolver configuration from the Windows registry.""" + try: + info = dns.win32util.get_dns_info() + if info.domain is not None: + self.domain = info.domain + self.nameservers = info.nameservers + self.search = info.search + except AttributeError: + raise NotImplementedError + + def _compute_timeout(self, start, lifetime=None, errors=None): + lifetime = self.lifetime if lifetime is None else lifetime + now = time.time() + duration = now - start + if errors is None: + errors = [] + if duration < 0: + if duration < -1: + # Time going backwards is bad. Just give up. + raise LifetimeTimeout(timeout=duration, errors=errors) + else: + # Time went backwards, but only a little. This can + # happen, e.g. under vmware with older linux kernels. + # Pretend it didn't happen. + now = start + if duration >= lifetime: + raise LifetimeTimeout(timeout=duration, errors=errors) + return min(lifetime - duration, self.timeout) + + def _get_qnames_to_try(self, qname, search): + # This is a separate method so we can unit test the search + # rules without requiring the Internet. + if search is None: + search = self.use_search_by_default + qnames_to_try = [] + if qname.is_absolute(): + qnames_to_try.append(qname) + else: + abs_qname = qname.concatenate(dns.name.root) + if search: + if len(self.search) > 0: + # There is a search list, so use it exclusively + search_list = self.search[:] + elif self.domain != dns.name.root and self.domain is not None: + # We have some notion of a domain that isn't the root, so + # use it as the search list. + search_list = [self.domain] + else: + search_list = [] + # Figure out the effective ndots (default is 1) + if self.ndots is None: + ndots = 1 + else: + ndots = self.ndots + for suffix in search_list: + qnames_to_try.append(qname + suffix) + if len(qname) > ndots: + # The name has at least ndots dots, so we should try an + # absolute query first. + qnames_to_try.insert(0, abs_qname) + else: + # The name has less than ndots dots, so we should search + # first, then try the absolute name. + qnames_to_try.append(abs_qname) + else: + qnames_to_try.append(abs_qname) + return qnames_to_try + + def use_tsig(self, keyring, keyname=None, + algorithm=dns.tsig.default_algorithm): + """Add a TSIG signature to each query. + + The parameters are passed to ``dns.message.Message.use_tsig()``; + see its documentation for details. 
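+
+        Example (a sketch; the key name and base64 secret are made-up
+        placeholders, and ``dns.tsigkeyring`` is assumed to be imported)::
+
+            keyring = dns.tsigkeyring.from_text(
+                {'keyname.': 'FAKEFAKEFAKEFAKEFAKEFAKEFAKEFAKE'})
+            resolver.use_tsig(keyring, keyname='keyname.')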
+ """ + + self.keyring = keyring + self.keyname = keyname + self.keyalgorithm = algorithm + + def use_edns(self, edns=0, ednsflags=0, + payload=dns.message.DEFAULT_EDNS_PAYLOAD): + """Configure EDNS behavior. + + *edns*, an ``int``, is the EDNS level to use. Specifying + ``None``, ``False``, or ``-1`` means "do not use EDNS", and in this case + the other parameters are ignored. Specifying ``True`` is + equivalent to specifying 0, i.e. "use EDNS0". + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + """ + + if edns is None or edns is False: + edns = -1 + elif edns is True: + edns = 0 + self.edns = edns + self.ednsflags = ednsflags + self.payload = payload + + def set_flags(self, flags): + """Overrides the default flags with your own. + + *flags*, an ``int``, the message flags to use. + """ + + self.flags = flags + + @property + def nameservers(self): + return self._nameservers + + @nameservers.setter + def nameservers(self, nameservers): + """ + *nameservers*, a ``list`` of nameservers. + + Raises ``ValueError`` if *nameservers* is anything other than a + ``list``. + """ + if isinstance(nameservers, list): + for nameserver in nameservers: + if not dns.inet.is_address(nameserver): + try: + if urlparse(nameserver).scheme != 'https': + raise NotImplementedError + except Exception: + raise ValueError(f'nameserver {nameserver} is not an ' + 'IP address or valid https URL') + self._nameservers = nameservers + else: + raise ValueError('nameservers must be a list' + ' (not a {})'.format(type(nameservers))) + + +class Resolver(BaseResolver): + """DNS stub resolver.""" + + def resolve(self, qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, + tcp=False, source=None, raise_on_no_answer=True, source_port=0, + lifetime=None, search=None): # pylint: disable=arguments-differ + """Query nameservers to find the answer to the question. + + The *qname*, *rdtype*, and *rdclass* parameters may be objects + of the appropriate type, or strings that can be converted into objects + of the appropriate type. + + *qname*, a ``dns.name.Name`` or ``str``, the query name. + + *rdtype*, an ``int`` or ``str``, the query type. + + *rdclass*, an ``int`` or ``str``, the query class. + + *tcp*, a ``bool``. If ``True``, use TCP to make the query. + + *source*, a ``str`` or ``None``. If not ``None``, bind to this IP + address when making queries. + + *raise_on_no_answer*, a ``bool``. If ``True``, raise + ``dns.resolver.NoAnswer`` if there's no answer to the question. + + *source_port*, an ``int``, the port from which to send the message. + + *lifetime*, a ``float``, how many seconds a query should run + before timing out. + + *search*, a ``bool`` or ``None``, determines whether the + search list configured in the system's resolver configuration + are used for relative names, and whether the resolver's domain + may be added to relative names. The default is ``None``, + which causes the value of the resolver's + ``use_search_by_default`` attribute to be used. + + Raises ``dns.resolver.LifetimeTimeout`` if no answers could be found + in the specified lifetime. + + Raises ``dns.resolver.NXDOMAIN`` if the query name does not exist. + + Raises ``dns.resolver.YXDOMAIN`` if the query name is too long after + DNAME substitution. 
+ + Raises ``dns.resolver.NoAnswer`` if *raise_on_no_answer* is + ``True`` and the query name exists but has no RRset of the + desired type and class. + + Raises ``dns.resolver.NoNameservers`` if no non-broken + nameservers are available to answer the question. + + Returns a ``dns.resolver.Answer`` instance. + + """ + + resolution = _Resolution(self, qname, rdtype, rdclass, tcp, + raise_on_no_answer, search) + start = time.time() + while True: + (request, answer) = resolution.next_request() + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + # cache hit! + return answer + done = False + while not done: + (nameserver, port, tcp, backoff) = resolution.next_nameserver() + if backoff: + time.sleep(backoff) + timeout = self._compute_timeout(start, lifetime, + resolution.errors) + try: + if dns.inet.is_address(nameserver): + if tcp: + response = dns.query.tcp(request, nameserver, + timeout=timeout, + port=port, + source=source, + source_port=source_port) + else: + response = dns.query.udp(request, + nameserver, + timeout=timeout, + port=port, + source=source, + source_port=source_port, + raise_on_truncation=True) + else: + response = dns.query.https(request, nameserver, + timeout=timeout) + except Exception as ex: + (_, done) = resolution.query_result(None, ex) + continue + (answer, done) = resolution.query_result(response, None) + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + return answer + + def query(self, qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, + tcp=False, source=None, raise_on_no_answer=True, source_port=0, + lifetime=None): # pragma: no cover + """Query nameservers to find the answer to the question. + + This method calls resolve() with ``search=True``, and is + provided for backwards compatibility with prior versions of + dnspython. See the documentation for the resolve() method for + further details. + """ + warnings.warn('please use dns.resolver.Resolver.resolve() instead', + DeprecationWarning, stacklevel=2) + return self.resolve(qname, rdtype, rdclass, tcp, source, + raise_on_no_answer, source_port, lifetime, + True) + + def resolve_address(self, ipaddr, *args, **kwargs): + """Use a resolver to run a reverse query for PTR records. + + This utilizes the resolve() method to perform a PTR lookup on the + specified IP address. + + *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get + the PTR record for. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + """ + + return self.resolve(dns.reversename.from_address(ipaddr), + rdtype=dns.rdatatype.PTR, + rdclass=dns.rdataclass.IN, + *args, **kwargs) + + # pylint: disable=redefined-outer-name + + def canonical_name(self, name): + """Determine the canonical name of *name*. + + The canonical name is the name the resolver uses for queries + after all CNAME and DNAME renamings have been applied. + + *name*, a ``dns.name.Name`` or ``str``, the query name. + + This method can raise any exception that ``resolve()`` can + raise, other than ``dns.resolver.NoAnswer`` and + ``dns.resolver.NXDOMAIN``. 
+ + Returns a ``dns.name.Name``. + """ + try: + answer = self.resolve(name, raise_on_no_answer=False) + canonical_name = answer.canonical_name + except dns.resolver.NXDOMAIN as e: + canonical_name = e.canonical_name + return canonical_name + + # pylint: enable=redefined-outer-name + + +#: The default resolver. +default_resolver = None + + +def get_default_resolver(): + """Get the default resolver, initializing it if necessary.""" + if default_resolver is None: + reset_default_resolver() + return default_resolver + + +def reset_default_resolver(): + """Re-initialize default resolver. + + Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX + systems) will be re-read immediately. + """ + + global default_resolver + default_resolver = Resolver() + + +def resolve(qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, + tcp=False, source=None, raise_on_no_answer=True, + source_port=0, lifetime=None, search=None): + """Query nameservers to find the answer to the question. + + This is a convenience function that uses the default resolver + object to make the query. + + See ``dns.resolver.Resolver.resolve`` for more information on the + parameters. + """ + + return get_default_resolver().resolve(qname, rdtype, rdclass, tcp, source, + raise_on_no_answer, source_port, + lifetime, search) + +def query(qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, + tcp=False, source=None, raise_on_no_answer=True, + source_port=0, lifetime=None): # pragma: no cover + """Query nameservers to find the answer to the question. + + This method calls resolve() with ``search=True``, and is + provided for backwards compatibility with prior versions of + dnspython. See the documentation for the resolve() method for + further details. + """ + warnings.warn('please use dns.resolver.resolve() instead', + DeprecationWarning, stacklevel=2) + return resolve(qname, rdtype, rdclass, tcp, source, + raise_on_no_answer, source_port, lifetime, + True) + + +def resolve_address(ipaddr, *args, **kwargs): + """Use a resolver to run a reverse query for PTR records. + + See ``dns.resolver.Resolver.resolve_address`` for more information on the + parameters. + """ + + return get_default_resolver().resolve_address(ipaddr, *args, **kwargs) + + +def canonical_name(name): + """Determine the canonical name of *name*. + + See ``dns.resolver.Resolver.canonical_name`` for more information on the + parameters and possible exceptions. + """ + + return get_default_resolver().canonical_name(name) + + +def zone_for_name(name, rdclass=dns.rdataclass.IN, tcp=False, resolver=None, + lifetime=None): + """Find the name of the zone which contains the specified name. + + *name*, an absolute ``dns.name.Name`` or ``str``, the query name. + + *rdclass*, an ``int``, the query class. + + *tcp*, a ``bool``. If ``True``, use TCP to make the query. + + *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. + If ``None``, the default, then the default resolver is used. + + *lifetime*, a ``float``, the total time to allow for the queries needed + to determine the zone. If ``None``, the default, then only the individual + query limits of the resolver apply. + + Raises ``dns.resolver.NoRootSOA`` if there is no SOA RR at the DNS + root. (This is only likely to happen if you're using non-default + root servers in your network and they are misconfigured.) + + Raises ``dns.resolver.LifetimeTimeout`` if the answer could not be + found in the allotted lifetime. + + Returns a ``dns.name.Name``. 
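+
+    Example (a sketch; the name is a placeholder)::
+
+        dns.resolver.zone_for_name('www.example.com.')
+        # typically returns the enclosing zone's name, e.g. example.com.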
+ """ + + if isinstance(name, str): + name = dns.name.from_text(name, dns.name.root) + if resolver is None: + resolver = get_default_resolver() + if not name.is_absolute(): + raise NotAbsolute(name) + start = time.time() + if lifetime is not None: + expiration = start + lifetime + else: + expiration = None + while 1: + try: + if expiration: + rlifetime = expiration - time.time() + if rlifetime <= 0: + rlifetime = 0 + else: + rlifetime = None + answer = resolver.resolve(name, dns.rdatatype.SOA, rdclass, tcp, + lifetime=rlifetime) + if answer.rrset.name == name: + return name + # otherwise we were CNAMEd or DNAMEd and need to look higher + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as e: + if isinstance(e, dns.resolver.NXDOMAIN): + response = e.responses().get(name) + else: + response = e.response() # pylint: disable=no-value-for-parameter + if response: + for rrs in response.authority: + if rrs.rdtype == dns.rdatatype.SOA and \ + rrs.rdclass == rdclass: + (nr, _, _) = rrs.name.fullcompare(name) + if nr == dns.name.NAMERELN_SUPERDOMAIN: + # We're doing a proper superdomain check as + # if the name were equal we ought to have gotten + # it in the answer section! We are ignoring the + # possibility that the authority is insane and + # is including multiple SOA RRs for different + # authorities. + return rrs.name + # we couldn't extract anything useful from the response (e.g. it's + # a type 3 NXDOMAIN) + try: + name = name.parent() + except dns.name.NoParent: + raise NoRootSOA + +# +# Support for overriding the system resolver for all python code in the +# running process. +# + +_protocols_for_socktype = { + socket.SOCK_DGRAM: [socket.SOL_UDP], + socket.SOCK_STREAM: [socket.SOL_TCP], +} + +_resolver = None +_original_getaddrinfo = socket.getaddrinfo +_original_getnameinfo = socket.getnameinfo +_original_getfqdn = socket.getfqdn +_original_gethostbyname = socket.gethostbyname +_original_gethostbyname_ex = socket.gethostbyname_ex +_original_gethostbyaddr = socket.gethostbyaddr + + +def _getaddrinfo(host=None, service=None, family=socket.AF_UNSPEC, socktype=0, + proto=0, flags=0): + if flags & socket.AI_NUMERICHOST != 0: + # Short circuit directly into the system's getaddrinfo(). We're + # not adding any value in this case, and this avoids infinite loops + # because dns.query.* needs to call getaddrinfo() for IPv6 scoping + # reasons. We will also do this short circuit below if we + # discover that the host is an address literal. + return _original_getaddrinfo(host, service, family, socktype, proto, + flags) + if flags & (socket.AI_ADDRCONFIG | socket.AI_V4MAPPED) != 0: + # Not implemented. We raise a gaierror as opposed to a + # NotImplementedError as it helps callers handle errors more + # appropriately. [Issue #316] + # + # We raise EAI_FAIL as opposed to EAI_SYSTEM because there is + # no EAI_SYSTEM on Windows [Issue #416]. We didn't go for + # EAI_BADFLAGS as the flags aren't bad, we just don't + # implement them. + raise socket.gaierror(socket.EAI_FAIL, + 'Non-recoverable failure in name resolution') + if host is None and service is None: + raise socket.gaierror(socket.EAI_NONAME, 'Name or service not known') + v6addrs = [] + v4addrs = [] + canonical_name = None # pylint: disable=redefined-outer-name + # Is host None or an address literal? If so, use the system's + # getaddrinfo(). 
+ if host is None: + return _original_getaddrinfo(host, service, family, socktype, + proto, flags) + try: + # We don't care about the result of af_for_address(), we're just + # calling it so it raises an exception if host is not an IPv4 or + # IPv6 address. + dns.inet.af_for_address(host) + return _original_getaddrinfo(host, service, family, socktype, + proto, flags) + except Exception: + pass + # Something needs resolution! + try: + if family == socket.AF_INET6 or family == socket.AF_UNSPEC: + v6 = _resolver.resolve(host, dns.rdatatype.AAAA, + raise_on_no_answer=False) + # Note that setting host ensures we query the same name + # for A as we did for AAAA. + host = v6.qname + canonical_name = v6.canonical_name.to_text(True) + if v6.rrset is not None: + for rdata in v6.rrset: + v6addrs.append(rdata.address) + if family == socket.AF_INET or family == socket.AF_UNSPEC: + v4 = _resolver.resolve(host, dns.rdatatype.A, + raise_on_no_answer=False) + host = v4.qname + canonical_name = v4.canonical_name.to_text(True) + if v4.rrset is not None: + for rdata in v4.rrset: + v4addrs.append(rdata.address) + except dns.resolver.NXDOMAIN: + raise socket.gaierror(socket.EAI_NONAME, 'Name or service not known') + except Exception: + # We raise EAI_AGAIN here as the failure may be temporary + # (e.g. a timeout) and EAI_SYSTEM isn't defined on Windows. + # [Issue #416] + raise socket.gaierror(socket.EAI_AGAIN, + 'Temporary failure in name resolution') + port = None + try: + # Is it a port literal? + if service is None: + port = 0 + else: + port = int(service) + except Exception: + if flags & socket.AI_NUMERICSERV == 0: + try: + port = socket.getservbyname(service) + except Exception: + pass + if port is None: + raise socket.gaierror(socket.EAI_NONAME, 'Name or service not known') + tuples = [] + if socktype == 0: + socktypes = [socket.SOCK_DGRAM, socket.SOCK_STREAM] + else: + socktypes = [socktype] + if flags & socket.AI_CANONNAME != 0: + cname = canonical_name + else: + cname = '' + if family == socket.AF_INET6 or family == socket.AF_UNSPEC: + for addr in v6addrs: + for socktype in socktypes: + for proto in _protocols_for_socktype[socktype]: + tuples.append((socket.AF_INET6, socktype, proto, + cname, (addr, port, 0, 0))) + if family == socket.AF_INET or family == socket.AF_UNSPEC: + for addr in v4addrs: + for socktype in socktypes: + for proto in _protocols_for_socktype[socktype]: + tuples.append((socket.AF_INET, socktype, proto, + cname, (addr, port))) + if len(tuples) == 0: + raise socket.gaierror(socket.EAI_NONAME, 'Name or service not known') + return tuples + + +def _getnameinfo(sockaddr, flags=0): + host = sockaddr[0] + port = sockaddr[1] + if len(sockaddr) == 4: + scope = sockaddr[3] + family = socket.AF_INET6 + else: + scope = None + family = socket.AF_INET + tuples = _getaddrinfo(host, port, family, socket.SOCK_STREAM, + socket.SOL_TCP, 0) + if len(tuples) > 1: + raise socket.error('sockaddr resolved to multiple addresses') + addr = tuples[0][4][0] + if flags & socket.NI_DGRAM: + pname = 'udp' + else: + pname = 'tcp' + qname = dns.reversename.from_address(addr) + if flags & socket.NI_NUMERICHOST == 0: + try: + answer = _resolver.resolve(qname, 'PTR') + hostname = answer.rrset[0].target.to_text(True) + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + if flags & socket.NI_NAMEREQD: + raise socket.gaierror(socket.EAI_NONAME, + 'Name or service not known') + hostname = addr + if scope is not None: + hostname += '%' + str(scope) + else: + hostname = addr + if scope is not None: + hostname += '%' + 
str(scope) + if flags & socket.NI_NUMERICSERV: + service = str(port) + else: + service = socket.getservbyport(port, pname) + return (hostname, service) + + +def _getfqdn(name=None): + if name is None: + name = socket.gethostname() + try: + (name, _, _) = _gethostbyaddr(name) + # Python's version checks aliases too, but our gethostbyname + # ignores them, so we do so here as well. + except Exception: + pass + return name + + +def _gethostbyname(name): + return _gethostbyname_ex(name)[2][0] + + +def _gethostbyname_ex(name): + aliases = [] + addresses = [] + tuples = _getaddrinfo(name, 0, socket.AF_INET, socket.SOCK_STREAM, + socket.SOL_TCP, socket.AI_CANONNAME) + canonical = tuples[0][3] + for item in tuples: + addresses.append(item[4][0]) + # XXX we just ignore aliases + return (canonical, aliases, addresses) + + +def _gethostbyaddr(ip): + try: + dns.ipv6.inet_aton(ip) + sockaddr = (ip, 80, 0, 0) + family = socket.AF_INET6 + except Exception: + try: + dns.ipv4.inet_aton(ip) + except Exception: + raise socket.gaierror(socket.EAI_NONAME, + 'Name or service not known') + sockaddr = (ip, 80) + family = socket.AF_INET + (name, _) = _getnameinfo(sockaddr, socket.NI_NAMEREQD) + aliases = [] + addresses = [] + tuples = _getaddrinfo(name, 0, family, socket.SOCK_STREAM, socket.SOL_TCP, + socket.AI_CANONNAME) + canonical = tuples[0][3] + # We only want to include an address from the tuples if it's the + # same as the one we asked about. We do this comparison in binary + # to avoid any differences in text representations. + bin_ip = dns.inet.inet_pton(family, ip) + for item in tuples: + addr = item[4][0] + bin_addr = dns.inet.inet_pton(family, addr) + if bin_ip == bin_addr: + addresses.append(addr) + # XXX we just ignore aliases + return (canonical, aliases, addresses) + + +def override_system_resolver(resolver=None): + """Override the system resolver routines in the socket module with + versions which use dnspython's resolver. + + This can be useful in testing situations where you want to control + the resolution behavior of python code without having to change + the system's resolver settings (e.g. /etc/resolv.conf). + + The resolver to use may be specified; if it's not, the default + resolver will be used. + + resolver, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. + """ + + if resolver is None: + resolver = get_default_resolver() + global _resolver + _resolver = resolver + socket.getaddrinfo = _getaddrinfo + socket.getnameinfo = _getnameinfo + socket.getfqdn = _getfqdn + socket.gethostbyname = _gethostbyname + socket.gethostbyname_ex = _gethostbyname_ex + socket.gethostbyaddr = _gethostbyaddr + + +def restore_system_resolver(): + """Undo the effects of prior override_system_resolver().""" + + global _resolver + _resolver = None + socket.getaddrinfo = _original_getaddrinfo + socket.getnameinfo = _original_getnameinfo + socket.getfqdn = _original_getfqdn + socket.gethostbyname = _original_gethostbyname + socket.gethostbyname_ex = _original_gethostbyname_ex + socket.gethostbyaddr = _original_gethostbyaddr diff --git a/venv/lib/python3.10/site-packages/dns/resolver.pyi b/venv/lib/python3.10/site-packages/dns/resolver.pyi new file mode 100644 index 0000000..348df4d --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/resolver.pyi @@ -0,0 +1,66 @@ +from typing import Union, Optional, List, Any, Dict +from . import exception, rdataclass, name, rdatatype + +import socket +_gethostbyname = socket.gethostbyname + +class NXDOMAIN(exception.DNSException): ... 
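+# These stubs declare signatures only; they let a type checker validate
+# a call such as (hypothetical)
+#     dns.resolver.resolve('example.com', 'MX')
+# against the resolve() declaration below without importing resolver.py.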
+class YXDOMAIN(exception.DNSException): ... +class NoAnswer(exception.DNSException): ... +class NoNameservers(exception.DNSException): ... +class NotAbsolute(exception.DNSException): ... +class NoRootSOA(exception.DNSException): ... +class NoMetaqueries(exception.DNSException): ... +class NoResolverConfiguration(exception.DNSException): ... +Timeout = exception.Timeout + +def resolve(qname : str, rdtype : Union[int,str] = 0, + rdclass : Union[int,str] = 0, + tcp=False, source=None, raise_on_no_answer=True, + source_port=0, lifetime : Optional[float]=None, + search : Optional[bool]=None): + ... +def query(qname : str, rdtype : Union[int,str] = 0, + rdclass : Union[int,str] = 0, + tcp=False, source=None, raise_on_no_answer=True, + source_port=0, lifetime : Optional[float]=None): + ... +def resolve_address(ipaddr: str, *args: Any, **kwargs: Optional[Dict]): + ... +class LRUCache: + def __init__(self, max_size=1000): + ... + def get(self, key): + ... + def put(self, key, val): + ... +class Answer: + def __init__(self, qname, rdtype, rdclass, response, + raise_on_no_answer=True): + ... +def zone_for_name(name, rdclass : int = rdataclass.IN, tcp=False, + resolver : Optional[Resolver] = None): + ... + +class Resolver: + def __init__(self, filename : Optional[str] = '/etc/resolv.conf', + configure : Optional[bool] = True): + self.nameservers : List[str] + def resolve(self, qname : str, rdtype : Union[int,str] = rdatatype.A, + rdclass : Union[int,str] = rdataclass.IN, + tcp : bool = False, source : Optional[str] = None, + raise_on_no_answer=True, source_port : int = 0, + lifetime : Optional[float]=None, + search : Optional[bool]=None): + ... + def query(self, qname : str, rdtype : Union[int,str] = rdatatype.A, + rdclass : Union[int,str] = rdataclass.IN, + tcp : bool = False, source : Optional[str] = None, + raise_on_no_answer=True, source_port : int = 0, + lifetime : Optional[float]=None): + ... +default_resolver: typing.Optional[Resolver] +def reset_default_resolver() -> None: + ... +def get_default_resolver() -> Resolver: + ... diff --git a/venv/lib/python3.10/site-packages/dns/reversename.py b/venv/lib/python3.10/site-packages/dns/reversename.py new file mode 100644 index 0000000..e0beb03 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/reversename.py @@ -0,0 +1,100 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
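+
+# Round-trip sketch (loopback used as the illustrative value):
+#     from_address('127.0.0.1') yields the name 1.0.0.127.in-addr.arpa.
+#     to_address() of that name yields '127.0.0.1' again.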
+ +"""DNS Reverse Map Names.""" + +import binascii + +import dns.name +import dns.ipv6 +import dns.ipv4 + +ipv4_reverse_domain = dns.name.from_text('in-addr.arpa.') +ipv6_reverse_domain = dns.name.from_text('ip6.arpa.') + + +def from_address(text, v4_origin=ipv4_reverse_domain, + v6_origin=ipv6_reverse_domain): + """Convert an IPv4 or IPv6 address in textual form into a Name object whose + value is the reverse-map domain name of the address. + + *text*, a ``str``, is an IPv4 or IPv6 address in textual form + (e.g. '127.0.0.1', '::1') + + *v4_origin*, a ``dns.name.Name`` to append to the labels corresponding to + the address if the address is an IPv4 address, instead of the default + (in-addr.arpa.) + + *v6_origin*, a ``dns.name.Name`` to append to the labels corresponding to + the address if the address is an IPv6 address, instead of the default + (ip6.arpa.) + + Raises ``dns.exception.SyntaxError`` if the address is badly formed. + + Returns a ``dns.name.Name``. + """ + + try: + v6 = dns.ipv6.inet_aton(text) + if dns.ipv6.is_mapped(v6): + parts = ['%d' % byte for byte in v6[12:]] + origin = v4_origin + else: + parts = [x for x in str(binascii.hexlify(v6).decode())] + origin = v6_origin + except Exception: + parts = ['%d' % + byte for byte in dns.ipv4.inet_aton(text)] + origin = v4_origin + return dns.name.from_text('.'.join(reversed(parts)), origin=origin) + + +def to_address(name, v4_origin=ipv4_reverse_domain, + v6_origin=ipv6_reverse_domain): + """Convert a reverse map domain name into textual address form. + + *name*, a ``dns.name.Name``, an IPv4 or IPv6 address in reverse-map name + form. + + *v4_origin*, a ``dns.name.Name`` representing the top-level domain for + IPv4 addresses, instead of the default (in-addr.arpa.) + + *v6_origin*, a ``dns.name.Name`` representing the top-level domain for + IPv4 addresses, instead of the default (ip6.arpa.) + + Raises ``dns.exception.SyntaxError`` if the name does not have a + reverse-map form. + + Returns a ``str``. + """ + + if name.is_subdomain(v4_origin): + name = name.relativize(v4_origin) + text = b'.'.join(reversed(name.labels)) + # run through inet_ntoa() to check syntax and make pretty. + return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text)) + elif name.is_subdomain(v6_origin): + name = name.relativize(v6_origin) + labels = list(reversed(name.labels)) + parts = [] + for i in range(0, len(labels), 4): + parts.append(b''.join(labels[i:i + 4])) + text = b':'.join(parts) + # run through inet_ntoa() to check syntax and make pretty. + return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text)) + else: + raise dns.exception.SyntaxError('unknown reverse-map address family') diff --git a/venv/lib/python3.10/site-packages/dns/reversename.pyi b/venv/lib/python3.10/site-packages/dns/reversename.pyi new file mode 100644 index 0000000..97f072e --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/reversename.pyi @@ -0,0 +1,6 @@ +from . import name +def from_address(text : str) -> name.Name: + ... + +def to_address(name : name.Name) -> str: + ... diff --git a/venv/lib/python3.10/site-packages/dns/rrset.py b/venv/lib/python3.10/site-packages/dns/rrset.py new file mode 100644 index 0000000..a71d457 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rrset.py @@ -0,0 +1,229 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. 
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""DNS RRsets (an RRset is a named rdataset)"""
+
+
+import dns.name
+import dns.rdataset
+import dns.rdataclass
+import dns.renderer
+
+
+class RRset(dns.rdataset.Rdataset):
+
+    """A DNS RRset (named rdataset).
+
+    RRset inherits from Rdataset, and RRsets can be treated as
+    Rdatasets in most cases. There are, however, a few notable
+    exceptions. RRsets have different to_wire() and to_text() method
+    arguments, reflecting the fact that RRsets always have an owner
+    name.
+    """
+
+    __slots__ = ['name', 'deleting']
+
+    def __init__(self, name, rdclass, rdtype, covers=dns.rdatatype.NONE,
+                 deleting=None):
+        """Create a new RRset."""
+
+        super().__init__(rdclass, rdtype, covers)
+        self.name = name
+        self.deleting = deleting
+
+    def _clone(self):
+        obj = super()._clone()
+        obj.name = self.name
+        obj.deleting = self.deleting
+        return obj
+
+    def __repr__(self):
+        if self.covers == 0:
+            ctext = ''
+        else:
+            ctext = '(' + dns.rdatatype.to_text(self.covers) + ')'
+        if self.deleting is not None:
+            dtext = ' delete=' + dns.rdataclass.to_text(self.deleting)
+        else:
+            dtext = ''
+        return '<DNS ' + str(self.name) + ' ' + \
+               dns.rdataclass.to_text(self.rdclass) + ' ' + \
+               dns.rdatatype.to_text(self.rdtype) + ctext + dtext + \
+               ' RRset: ' + self._rdata_repr() + '>'
+
+    def __str__(self):
+        return self.to_text()
+
+    def __eq__(self, other):
+        if isinstance(other, RRset):
+            if self.name != other.name:
+                return False
+        elif not isinstance(other, dns.rdataset.Rdataset):
+            return False
+        return super().__eq__(other)
+
+    def match(self, *args, **kwargs):
+        """Does this rrset match the specified attributes?
+
+        Behaves as :py:func:`full_match()` if the first argument is a
+        ``dns.name.Name``, and as :py:func:`dns.rdataset.Rdataset.match()`
+        otherwise.
+
+        (This behavior fixes a design mistake where the signature of this
+        method became incompatible with that of its superclass. The fix
+        makes RRsets matchable as Rdatasets while preserving backwards
+        compatibility.)
+        """
+        if isinstance(args[0], dns.name.Name):
+            return self.full_match(*args, **kwargs)
+        else:
+            return super().match(*args, **kwargs)
+
+    def full_match(self, name, rdclass, rdtype, covers,
+                   deleting=None):
+        """Returns ``True`` if this rrset matches the specified name, class,
+        type, covers, and deletion state.
+        """
+        if not super().match(rdclass, rdtype, covers):
+            return False
+        if self.name != name or self.deleting != deleting:
+            return False
+        return True
+
+    # pylint: disable=arguments-differ
+
+    def to_text(self, origin=None, relativize=True, **kw):
+        """Convert the RRset into DNS zone file format.
+
+        See ``dns.name.Name.choose_relativity`` for more information
+        on how *origin* and *relativize* determine the way names
+        are emitted.
+
+        Any additional keyword arguments are passed on to the rdata
+        ``to_text()`` method.
+
+        *origin*, a ``dns.name.Name`` or ``None``, the origin for relative
+        names.
+
+        *relativize*, a ``bool``.
If ``True``, names will be relativized + to *origin*. + """ + + return super().to_text(self.name, origin, relativize, + self.deleting, **kw) + + def to_wire(self, file, compress=None, origin=None, + **kw): + """Convert the RRset to wire format. + + All keyword arguments are passed to ``dns.rdataset.to_wire()``; see + that function for details. + + Returns an ``int``, the number of records emitted. + """ + + return super().to_wire(self.name, file, compress, origin, + self.deleting, **kw) + + # pylint: enable=arguments-differ + + def to_rdataset(self): + """Convert an RRset into an Rdataset. + + Returns a ``dns.rdataset.Rdataset``. + """ + return dns.rdataset.from_rdata_list(self.ttl, list(self)) + + +def from_text_list(name, ttl, rdclass, rdtype, text_rdatas, + idna_codec=None, origin=None, relativize=True, + relativize_to=None): + """Create an RRset with the specified name, TTL, class, and type, and with + the specified list of rdatas in text format. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Returns a ``dns.rrset.RRset`` object. + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None, idna_codec=idna_codec) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + r = RRset(name, rdclass, rdtype) + r.update_ttl(ttl) + for t in text_rdatas: + rd = dns.rdata.from_text(r.rdclass, r.rdtype, t, origin, relativize, + relativize_to, idna_codec) + r.add(rd) + return r + + +def from_text(name, ttl, rdclass, rdtype, *text_rdatas): + """Create an RRset with the specified name, TTL, class, and type and with + the specified rdatas in text format. + + Returns a ``dns.rrset.RRset`` object. + """ + + return from_text_list(name, ttl, rdclass, rdtype, text_rdatas) + + +def from_rdata_list(name, ttl, rdatas, idna_codec=None): + """Create an RRset with the specified name and TTL, and with + the specified list of rdata objects. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + Returns a ``dns.rrset.RRset`` object. + + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None, idna_codec=idna_codec) + + if len(rdatas) == 0: + raise ValueError("rdata list must not be empty") + r = None + for rd in rdatas: + if r is None: + r = RRset(name, rd.rdclass, rd.rdtype) + r.update_ttl(ttl) + r.add(rd) + return r + + +def from_rdata(name, ttl, *rdatas): + """Create an RRset with the specified name and TTL, and with + the specified rdata objects. + + Returns a ``dns.rrset.RRset`` object. + """ + + return from_rdata_list(name, ttl, rdatas) diff --git a/venv/lib/python3.10/site-packages/dns/rrset.pyi b/venv/lib/python3.10/site-packages/dns/rrset.pyi new file mode 100644 index 0000000..0a81a2a --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/rrset.pyi @@ -0,0 +1,10 @@ +from typing import List, Optional +from . 
import rdataset, rdatatype + +class RRset(rdataset.Rdataset): + def __init__(self, name, rdclass : int , rdtype : int, covers=rdatatype.NONE, + deleting : Optional[int] =None) -> None: + self.name = name + self.deleting = deleting +def from_text(name : str, ttl : int, rdclass : str, rdtype : str, *text_rdatas : str): + ... diff --git a/venv/lib/python3.10/site-packages/dns/serial.py b/venv/lib/python3.10/site-packages/dns/serial.py new file mode 100644 index 0000000..b047415 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/serial.py @@ -0,0 +1,117 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""Serial Number Arthimetic from RFC 1982""" + +class Serial: + def __init__(self, value, bits=32): + self.value = value % 2 ** bits + self.bits = bits + + def __repr__(self): + return f'dns.serial.Serial({self.value}, {self.bits})' + + def __eq__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + return self.value == other.value + + def __ne__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + return self.value != other.value + + def __lt__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + if self.value < other.value and \ + other.value - self.value < 2 ** (self.bits - 1): + return True + elif self.value > other.value and \ + self.value - other.value > 2 ** (self.bits - 1): + return True + else: + return False + + def __le__(self, other): + return self == other or self < other + + def __gt__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + if self.value < other.value and \ + other.value - self.value > 2 ** (self.bits - 1): + return True + elif self.value > other.value and \ + self.value - other.value < 2 ** (self.bits - 1): + return True + else: + return False + + def __ge__(self, other): + return self == other or self > other + + def __add__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v += delta + v = v % 2 ** self.bits + return Serial(v, self.bits) + + def __iadd__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v += delta + v = v % 2 ** self.bits + self.value = v + return self + + def __sub__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v -= delta + v = v % 2 ** self.bits + return Serial(v, self.bits) + + def __isub__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v -= delta + v = v % 2 ** self.bits + self.value = v + return self diff --git 
a/venv/lib/python3.10/site-packages/dns/set.py b/venv/lib/python3.10/site-packages/dns/set.py new file mode 100644 index 0000000..1fd4d0a --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/set.py @@ -0,0 +1,278 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import itertools +import sys + +if sys.version_info >= (3, 7): + odict = dict +else: + from collections import OrderedDict as odict # pragma: no cover + +class Set: + + """A simple set class. + + This class was originally used to deal with sets being missing in + ancient versions of python, but dnspython will continue to use it + as these sets are based on lists and are thus indexable, and this + ability is widely used in dnspython applications. + """ + + __slots__ = ['items'] + + def __init__(self, items=None): + """Initialize the set. + + *items*, an iterable or ``None``, the initial set of items. + """ + + self.items = odict() + if items is not None: + for item in items: + self.add(item) + + def __repr__(self): + return "dns.set.Set(%s)" % repr(list(self.items.keys())) + + def add(self, item): + """Add an item to the set. + """ + + if item not in self.items: + self.items[item] = None + + def remove(self, item): + """Remove an item from the set. + """ + + try: + del self.items[item] + except KeyError: + raise ValueError + + def discard(self, item): + """Remove an item from the set if present. + """ + + self.items.pop(item, None) + + def _clone(self): + """Make a (shallow) copy of the set. + + There is a 'clone protocol' that subclasses of this class + should use. To make a copy, first call your super's _clone() + method, and use the object returned as the new instance. Then + make shallow copies of the attributes defined in the subclass. + + This protocol allows us to write the set algorithms that + return new instances (e.g. union) once, and keep using them in + subclasses. + """ + + if hasattr(self, '_clone_class'): + cls = self._clone_class + else: + cls = self.__class__ + obj = cls.__new__(cls) + obj.items = odict() + obj.items.update(self.items) + return obj + + def __copy__(self): + """Make a (shallow) copy of the set. + """ + + return self._clone() + + def copy(self): + """Make a (shallow) copy of the set. + """ + + return self._clone() + + def union_update(self, other): + """Update the set, adding any elements from other which are not + already in the set. + """ + + if not isinstance(other, Set): + raise ValueError('other must be a Set instance') + if self is other: + return + for item in other.items: + self.add(item) + + def intersection_update(self, other): + """Update the set, removing any elements from other which are not + in both sets. 
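# The 'clone protocol' described above, in a minimal hypothetical subclass
# (an illustrative aside, not part of the vendored file): call
# super()._clone() to copy the base items, then shallow-copy any subclass
# attributes; union()/intersection()/difference() inherited from Set then
# return TaggedSet instances automatically.
import dns.set

class TaggedSet(dns.set.Set):
    def __init__(self, items=None, tag=None):
        super().__init__(items)
        self.tag = tag

    def _clone(self):
        obj = super()._clone()   # a new TaggedSet with the items copied
        obj.tag = self.tag       # shallow-copy our own attribute
        return obj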
+ """ + + if not isinstance(other, Set): + raise ValueError('other must be a Set instance') + if self is other: + return + # we make a copy of the list so that we can remove items from + # the list without breaking the iterator. + for item in list(self.items): + if item not in other.items: + del self.items[item] + + def difference_update(self, other): + """Update the set, removing any elements from other which are in + the set. + """ + + if not isinstance(other, Set): + raise ValueError('other must be a Set instance') + if self is other: + self.items.clear() + else: + for item in other.items: + self.discard(item) + + def union(self, other): + """Return a new set which is the union of ``self`` and ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.union_update(other) + return obj + + def intersection(self, other): + """Return a new set which is the intersection of ``self`` and + ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.intersection_update(other) + return obj + + def difference(self, other): + """Return a new set which ``self`` - ``other``, i.e. the items + in ``self`` which are not also in ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.difference_update(other) + return obj + + def __or__(self, other): + return self.union(other) + + def __and__(self, other): + return self.intersection(other) + + def __add__(self, other): + return self.union(other) + + def __sub__(self, other): + return self.difference(other) + + def __ior__(self, other): + self.union_update(other) + return self + + def __iand__(self, other): + self.intersection_update(other) + return self + + def __iadd__(self, other): + self.union_update(other) + return self + + def __isub__(self, other): + self.difference_update(other) + return self + + def update(self, other): + """Update the set, adding any elements from other which are not + already in the set. + + *other*, the collection of items with which to update the set, which + may be any iterable type. + """ + + for item in other: + self.add(item) + + def clear(self): + """Make the set empty.""" + self.items.clear() + + def __eq__(self, other): + if odict == dict: + return self.items == other.items + else: + # We don't want an ordered comparison. + if len(self.items) != len(other.items): + return False + return all(elt in other.items for elt in self.items) + + def __ne__(self, other): + return not self.__eq__(other) + + def __len__(self): + return len(self.items) + + def __iter__(self): + return iter(self.items) + + def __getitem__(self, i): + if isinstance(i, slice): + return list(itertools.islice(self.items, i.start, i.stop, i.step)) + else: + return next(itertools.islice(self.items, i, i + 1)) + + def __delitem__(self, i): + if isinstance(i, slice): + for elt in list(self[i]): + del self.items[elt] + else: + del self.items[self[i]] + + def issubset(self, other): + """Is this set a subset of *other*? + + Returns a ``bool``. + """ + + if not isinstance(other, Set): + raise ValueError('other must be a Set instance') + for item in self.items: + if item not in other.items: + return False + return True + + def issuperset(self, other): + """Is this set a superset of *other*? + + Returns a ``bool``. 
+ """ + + if not isinstance(other, Set): + raise ValueError('other must be a Set instance') + for item in other.items: + if item not in self.items: + return False + return True diff --git a/venv/lib/python3.10/site-packages/dns/tokenizer.py b/venv/lib/python3.10/site-packages/dns/tokenizer.py new file mode 100644 index 0000000..cb6a630 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/tokenizer.py @@ -0,0 +1,680 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Tokenize DNS zone file format""" + +import io +import sys + +import dns.exception +import dns.name +import dns.ttl + +_DELIMITERS = {' ', '\t', '\n', ';', '(', ')', '"'} +_QUOTING_DELIMITERS = {'"'} + +EOF = 0 +EOL = 1 +WHITESPACE = 2 +IDENTIFIER = 3 +QUOTED_STRING = 4 +COMMENT = 5 +DELIMITER = 6 + + +class UngetBufferFull(dns.exception.DNSException): + """An attempt was made to unget a token when the unget buffer was full.""" + + +class Token: + """A DNS zone file format token. + + ttype: The token type + value: The token value + has_escape: Does the token value contain escapes? 
+ """ + + def __init__(self, ttype, value='', has_escape=False, comment=None): + """Initialize a token instance.""" + + self.ttype = ttype + self.value = value + self.has_escape = has_escape + self.comment = comment + + def is_eof(self): + return self.ttype == EOF + + def is_eol(self): + return self.ttype == EOL + + def is_whitespace(self): + return self.ttype == WHITESPACE + + def is_identifier(self): + return self.ttype == IDENTIFIER + + def is_quoted_string(self): + return self.ttype == QUOTED_STRING + + def is_comment(self): + return self.ttype == COMMENT + + def is_delimiter(self): # pragma: no cover (we don't return delimiters yet) + return self.ttype == DELIMITER + + def is_eol_or_eof(self): + return self.ttype == EOL or self.ttype == EOF + + def __eq__(self, other): + if not isinstance(other, Token): + return False + return (self.ttype == other.ttype and + self.value == other.value) + + def __ne__(self, other): + if not isinstance(other, Token): + return True + return (self.ttype != other.ttype or + self.value != other.value) + + def __str__(self): + return '%d "%s"' % (self.ttype, self.value) + + def unescape(self): + if not self.has_escape: + return self + unescaped = '' + l = len(self.value) + i = 0 + while i < l: + c = self.value[i] + i += 1 + if c == '\\': + if i >= l: # pragma: no cover (can't happen via get()) + raise dns.exception.UnexpectedEnd + c = self.value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = self.value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = self.value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + codepoint = int(c) * 100 + int(c2) * 10 + int(c3) + if codepoint > 255: + raise dns.exception.SyntaxError + c = chr(codepoint) + unescaped += c + return Token(self.ttype, unescaped) + + def unescape_to_bytes(self): + # We used to use unescape() for TXT-like records, but this + # caused problems as we'd process DNS escapes into Unicode code + # points instead of byte values, and then a to_text() of the + # processed data would not equal the original input. For + # example, \226 in the TXT record would have a to_text() of + # \195\162 because we applied UTF-8 encoding to Unicode code + # point 226. + # + # We now apply escapes while converting directly to bytes, + # avoiding this double encoding. + # + # This code also handles cases where the unicode input has + # non-ASCII code-points in it by converting it to UTF-8. TXT + # records aren't defined for Unicode, but this is the best we + # can do to preserve meaning. 
For example,
+        #
+        #     foo\u200bbar
+        #
+        # (where \u200b is Unicode code point 0x200b) will be treated
+        # as if the input had been the UTF-8 encoding of that string,
+        # namely:
+        #
+        #     foo\226\128\139bar
+        #
+        unescaped = b''
+        l = len(self.value)
+        i = 0
+        while i < l:
+            c = self.value[i]
+            i += 1
+            if c == '\\':
+                if i >= l:  # pragma: no cover (can't happen via get())
+                    raise dns.exception.UnexpectedEnd
+                c = self.value[i]
+                i += 1
+                if c.isdigit():
+                    if i >= l:
+                        raise dns.exception.UnexpectedEnd
+                    c2 = self.value[i]
+                    i += 1
+                    if i >= l:
+                        raise dns.exception.UnexpectedEnd
+                    c3 = self.value[i]
+                    i += 1
+                    if not (c2.isdigit() and c3.isdigit()):
+                        raise dns.exception.SyntaxError
+                    codepoint = int(c) * 100 + int(c2) * 10 + int(c3)
+                    if codepoint > 255:
+                        raise dns.exception.SyntaxError
+                    unescaped += b'%c' % (codepoint)
+                else:
+                    # Note that as mentioned above, if c is a Unicode
+                    # code point outside of the ASCII range, then this
+                    # += is converting that code point to its UTF-8
+                    # encoding and appending multiple bytes to
+                    # unescaped.
+                    unescaped += c.encode()
+            else:
+                unescaped += c.encode()
+        return Token(self.ttype, bytes(unescaped))
+
+
+class Tokenizer:
+    """A DNS zone file format tokenizer.
+
+    A token object is basically a (type, value) tuple.  The valid
+    types are EOF, EOL, WHITESPACE, IDENTIFIER, QUOTED_STRING,
+    COMMENT, and DELIMITER.
+
+    file: The file to tokenize
+
+    ungotten_char: The most recently ungotten character, or None.
+
+    ungotten_token: The most recently ungotten token, or None.
+
+    multiline: The current multiline level.  This value is increased
+    by one every time a '(' delimiter is read, and decreased by one every time
+    a ')' delimiter is read.
+
+    quoting: This variable is true if the tokenizer is currently
+    reading a quoted string.
+
+    eof: This variable is true if the tokenizer has encountered EOF.
+
+    delimiters: The current delimiter dictionary.
+
+    line_number: The current line number
+
+    filename: A filename that will be returned by the where() method.
+
+    idna_codec: A dns.name.IDNACodec, specifies the IDNA
+    encoder/decoder.  If None, the default IDNA 2003
+    encoder/decoder is used.
+    """
+
+    def __init__(self, f=sys.stdin, filename=None, idna_codec=None):
+        """Initialize a tokenizer instance.
+
+        f: The file to tokenize.  The default is sys.stdin.
+        This parameter may also be a string, in which case the tokenizer
+        will take its input from the contents of the string.
+
+        filename: the name of the filename that the where() method
+        will return.
+
+        idna_codec: A dns.name.IDNACodec, specifies the IDNA
+        encoder/decoder.  If None, the default IDNA 2003
+        encoder/decoder is used.
+        """
+
+        if isinstance(f, str):
+            f = io.StringIO(f)
+            if filename is None:
+                filename = '<string>'
+        elif isinstance(f, bytes):
+            f = io.StringIO(f.decode())
+            if filename is None:
+                filename = '<string>'
+        else:
+            if filename is None:
+                if f is sys.stdin:
+                    filename = '<stdin>'
+                else:
+                    filename = '<file>'
+        self.file = f
+        self.ungotten_char = None
+        self.ungotten_token = None
+        self.multiline = 0
+        self.quoting = False
+        self.eof = False
+        self.delimiters = _DELIMITERS
+        self.line_number = 1
+        self.filename = filename
+        if idna_codec is None:
+            idna_codec = dns.name.IDNA_2003
+        self.idna_codec = idna_codec
+
+    def _get_char(self):
+        """Read a character from input.
+ """ + + if self.ungotten_char is None: + if self.eof: + c = '' + else: + c = self.file.read(1) + if c == '': + self.eof = True + elif c == '\n': + self.line_number += 1 + else: + c = self.ungotten_char + self.ungotten_char = None + return c + + def where(self): + """Return the current location in the input. + + Returns a (string, int) tuple. The first item is the filename of + the input, the second is the current line number. + """ + + return (self.filename, self.line_number) + + def _unget_char(self, c): + """Unget a character. + + The unget buffer for characters is only one character large; it is + an error to try to unget a character when the unget buffer is not + empty. + + c: the character to unget + raises UngetBufferFull: there is already an ungotten char + """ + + if self.ungotten_char is not None: + # this should never happen! + raise UngetBufferFull # pragma: no cover + self.ungotten_char = c + + def skip_whitespace(self): + """Consume input until a non-whitespace character is encountered. + + The non-whitespace character is then ungotten, and the number of + whitespace characters consumed is returned. + + If the tokenizer is in multiline mode, then newlines are whitespace. + + Returns the number of characters skipped. + """ + + skipped = 0 + while True: + c = self._get_char() + if c != ' ' and c != '\t': + if (c != '\n') or not self.multiline: + self._unget_char(c) + return skipped + skipped += 1 + + def get(self, want_leading=False, want_comment=False): + """Get the next token. + + want_leading: If True, return a WHITESPACE token if the + first character read is whitespace. The default is False. + + want_comment: If True, return a COMMENT token if the + first token read is a comment. The default is False. + + Raises dns.exception.UnexpectedEnd: input ended prematurely + + Raises dns.exception.SyntaxError: input was badly formed + + Returns a Token. + """ + + if self.ungotten_token is not None: + token = self.ungotten_token + self.ungotten_token = None + if token.is_whitespace(): + if want_leading: + return token + elif token.is_comment(): + if want_comment: + return token + else: + return token + skipped = self.skip_whitespace() + if want_leading and skipped > 0: + return Token(WHITESPACE, ' ') + token = '' + ttype = IDENTIFIER + has_escape = False + while True: + c = self._get_char() + if c == '' or c in self.delimiters: + if c == '' and self.quoting: + raise dns.exception.UnexpectedEnd + if token == '' and ttype != QUOTED_STRING: + if c == '(': + self.multiline += 1 + self.skip_whitespace() + continue + elif c == ')': + if self.multiline <= 0: + raise dns.exception.SyntaxError + self.multiline -= 1 + self.skip_whitespace() + continue + elif c == '"': + if not self.quoting: + self.quoting = True + self.delimiters = _QUOTING_DELIMITERS + ttype = QUOTED_STRING + continue + else: + self.quoting = False + self.delimiters = _DELIMITERS + self.skip_whitespace() + continue + elif c == '\n': + return Token(EOL, '\n') + elif c == ';': + while 1: + c = self._get_char() + if c == '\n' or c == '': + break + token += c + if want_comment: + self._unget_char(c) + return Token(COMMENT, token) + elif c == '': + if self.multiline: + raise dns.exception.SyntaxError( + 'unbalanced parentheses') + return Token(EOF, comment=token) + elif self.multiline: + self.skip_whitespace() + token = '' + continue + else: + return Token(EOL, '\n', comment=token) + else: + # This code exists in case we ever want a + # delimiter to be returned. It never produces + # a token currently. 
+ token = c + ttype = DELIMITER + else: + self._unget_char(c) + break + elif self.quoting and c == '\n': + raise dns.exception.SyntaxError('newline in quoted string') + elif c == '\\': + # + # It's an escape. Put it and the next character into + # the token; it will be checked later for goodness. + # + token += c + has_escape = True + c = self._get_char() + if c == '' or (c == '\n' and not self.quoting): + raise dns.exception.UnexpectedEnd + token += c + if token == '' and ttype != QUOTED_STRING: + if self.multiline: + raise dns.exception.SyntaxError('unbalanced parentheses') + ttype = EOF + return Token(ttype, token, has_escape) + + def unget(self, token): + """Unget a token. + + The unget buffer for tokens is only one token large; it is + an error to try to unget a token when the unget buffer is not + empty. + + token: the token to unget + + Raises UngetBufferFull: there is already an ungotten token + """ + + if self.ungotten_token is not None: + raise UngetBufferFull + self.ungotten_token = token + + def next(self): + """Return the next item in an iteration. + + Returns a Token. + """ + + token = self.get() + if token.is_eof(): + raise StopIteration + return token + + __next__ = next + + def __iter__(self): + return self + + # Helpers + + def get_int(self, base=10): + """Read the next token and interpret it as an unsigned integer. + + Raises dns.exception.SyntaxError if not an unsigned integer. + + Returns an int. + """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError('expecting an identifier') + if not token.value.isdigit(): + raise dns.exception.SyntaxError('expecting an integer') + return int(token.value, base) + + def get_uint8(self): + """Read the next token and interpret it as an 8-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not an 8-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int() + if value < 0 or value > 255: + raise dns.exception.SyntaxError( + '%d is not an unsigned 8-bit integer' % value) + return value + + def get_uint16(self, base=10): + """Read the next token and interpret it as a 16-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 16-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 65535: + if base == 8: + raise dns.exception.SyntaxError( + '%o is not an octal unsigned 16-bit integer' % value) + else: + raise dns.exception.SyntaxError( + '%d is not an unsigned 16-bit integer' % value) + return value + + def get_uint32(self, base=10): + """Read the next token and interpret it as a 32-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 32-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 4294967295: + raise dns.exception.SyntaxError( + '%d is not an unsigned 32-bit integer' % value) + return value + + def get_uint48(self, base=10): + """Read the next token and interpret it as a 48-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 48-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 281474976710655: + raise dns.exception.SyntaxError( + '%d is not an unsigned 48-bit integer' % value) + return value + + def get_string(self, max_length=None): + """Read the next token and interpret it as a string. + + Raises dns.exception.SyntaxError if not a string. + Raises dns.exception.SyntaxError if token value length + exceeds max_length (if specified). 
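# Putting get(), the helpers, and iteration together on one master-file
# style line (an illustrative aside; assumes this module is importable as
# dns.tokenizer):
import dns.tokenizer

tok = dns.tokenizer.Tokenizer('www 300 IN A 10.0.0.1')
values = [t.value for t in tok]          # next() raises StopIteration at EOF
assert values == ['www', '300', 'IN', 'A', '10.0.0.1']

tok2 = dns.tokenizer.Tokenizer('3600 ; a comment')
assert tok2.get_int() == 3600            # helpers wrap get() with type checks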
+ + Returns a string. + """ + + token = self.get().unescape() + if not (token.is_identifier() or token.is_quoted_string()): + raise dns.exception.SyntaxError('expecting a string') + if max_length and len(token.value) > max_length: + raise dns.exception.SyntaxError("string too long") + return token.value + + def get_identifier(self): + """Read the next token, which should be an identifier. + + Raises dns.exception.SyntaxError if not an identifier. + + Returns a string. + """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError('expecting an identifier') + return token.value + + def get_remaining(self, max_tokens=None): + """Return the remaining tokens on the line, until an EOL or EOF is seen. + + max_tokens: If not None, stop after this number of tokens. + + Returns a list of tokens. + """ + + tokens = [] + while True: + token = self.get() + if token.is_eol_or_eof(): + self.unget(token) + break + tokens.append(token) + if len(tokens) == max_tokens: + break + return tokens + + def concatenate_remaining_identifiers(self, allow_empty=False): + """Read the remaining tokens on the line, which should be identifiers. + + Raises dns.exception.SyntaxError if there are no remaining tokens, + unless `allow_empty=True` is given. + + Raises dns.exception.SyntaxError if a token is seen that is not an + identifier. + + Returns a string containing a concatenation of the remaining + identifiers. + """ + s = "" + while True: + token = self.get().unescape() + if token.is_eol_or_eof(): + self.unget(token) + break + if not token.is_identifier(): + raise dns.exception.SyntaxError + s += token.value + if not (allow_empty or s): + raise dns.exception.SyntaxError('expecting another identifier') + return s + + def as_name(self, token, origin=None, relativize=False, relativize_to=None): + """Try to interpret the token as a DNS name. + + Raises dns.exception.SyntaxError if not a name. + + Returns a dns.name.Name. + """ + if not token.is_identifier(): + raise dns.exception.SyntaxError('expecting an identifier') + name = dns.name.from_text(token.value, origin, self.idna_codec) + return name.choose_relativity(relativize_to or origin, relativize) + + def get_name(self, origin=None, relativize=False, relativize_to=None): + """Read the next token and interpret it as a DNS name. + + Raises dns.exception.SyntaxError if not a name. + + Returns a dns.name.Name. + """ + + token = self.get() + return self.as_name(token, origin, relativize, relativize_to) + + def get_eol_as_token(self): + """Read the next token and raise an exception if it isn't EOL or + EOF. + + Returns a string. + """ + + token = self.get() + if not token.is_eol_or_eof(): + raise dns.exception.SyntaxError( + 'expected EOL or EOF, got %d "%s"' % (token.ttype, + token.value)) + return token + + def get_eol(self): + return self.get_eol_as_token().value + + def get_ttl(self): + """Read the next token and interpret it as a DNS TTL. + + Raises dns.exception.SyntaxError or dns.ttl.BadTTL if not an + identifier or badly formed. + + Returns an int. 
+ """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError('expecting an identifier') + return dns.ttl.from_text(token.value) diff --git a/venv/lib/python3.10/site-packages/dns/transaction.py b/venv/lib/python3.10/site-packages/dns/transaction.py new file mode 100644 index 0000000..d725492 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/transaction.py @@ -0,0 +1,596 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import collections + +import dns.exception +import dns.name +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rrset +import dns.serial +import dns.ttl + + +class TransactionManager: + def reader(self): + """Begin a read-only transaction.""" + raise NotImplementedError # pragma: no cover + + def writer(self, replacement=False): + """Begin a writable transaction. + + *replacement*, a ``bool``. If `True`, the content of the + transaction completely replaces any prior content. If False, + the default, then the content of the transaction updates the + existing content. + """ + raise NotImplementedError # pragma: no cover + + def origin_information(self): + """Returns a tuple + + (absolute_origin, relativize, effective_origin) + + giving the absolute name of the default origin for any + relative domain names, the "effective origin", and whether + names should be relativized. The "effective origin" is the + absolute origin if relativize is False, and the empty name if + relativize is true. (The effective origin is provided even + though it can be computed from the absolute_origin and + relativize setting because it avoids a lot of code + duplication.) + + If the returned names are `None`, then no origin information is + available. + + This information is used by code working with transactions to + allow it to coordinate relativization. The transaction code + itself takes what it gets (i.e. does not change name + relativity). + + """ + raise NotImplementedError # pragma: no cover + + def get_class(self): + """The class of the transaction manager. + """ + raise NotImplementedError # pragma: no cover + + def from_wire_origin(self): + """Origin to use in from_wire() calls. + """ + (absolute_origin, relativize, _) = self.origin_information() + if relativize: + return absolute_origin + else: + return None + + +class DeleteNotExact(dns.exception.DNSException): + """Existing data did not match data specified by an exact delete.""" + + +class ReadOnly(dns.exception.DNSException): + """Tried to write to a read-only transaction.""" + + +class AlreadyEnded(dns.exception.DNSException): + """Tried to use an already-ended transaction.""" + + +def _ensure_immutable_rdataset(rdataset): + if rdataset is None or isinstance(rdataset, dns.rdataset.ImmutableRdataset): + return rdataset + return dns.rdataset.ImmutableRdataset(rdataset) + +def _ensure_immutable_node(node): + if node is None or node.is_immutable(): + return node + return dns.node.ImmutableNode(node) + + +class Transaction: + + def __init__(self, manager, replacement=False, read_only=False): + self.manager = manager + self.replacement = replacement + self.read_only = read_only + self._ended = False + self._check_put_rdataset = [] + self._check_delete_rdataset = [] + self._check_delete_name = [] + + # + # This is the high level API + # + + def get(self, name, rdtype, covers=dns.rdatatype.NONE): + """Return the rdataset associated with *name*, *rdtype*, and *covers*, + or `None` if not found. 
+ + Note that the returned rdataset is immutable. + """ + self._check_ended() + if isinstance(name, str): + name = dns.name.from_text(name, None) + rdtype = dns.rdatatype.RdataType.make(rdtype) + rdataset = self._get_rdataset(name, rdtype, covers) + return _ensure_immutable_rdataset(rdataset) + + def get_node(self, name): + """Return the node at *name*, if any. + + Returns an immutable node or ``None``. + """ + return _ensure_immutable_node(self._get_node(name)) + + def _check_read_only(self): + if self.read_only: + raise ReadOnly + + def add(self, *args): + """Add records. + + The arguments may be: + + - rrset + + - name, rdataset... + + - name, ttl, rdata... + """ + self._check_ended() + self._check_read_only() + return self._add(False, args) + + def replace(self, *args): + """Replace the existing rdataset at the name with the specified + rdataset, or add the specified rdataset if there was no existing + rdataset. + + The arguments may be: + + - rrset + + - name, rdataset... + + - name, ttl, rdata... + + Note that if you want to replace the entire node, you should do + a delete of the name followed by one or more calls to add() or + replace(). + """ + self._check_ended() + self._check_read_only() + return self._add(True, args) + + def delete(self, *args): + """Delete records. + + It is not an error if some of the records are not in the existing + set. + + The arguments may be: + + - rrset + + - name + + - name, rdataclass, rdatatype, [covers] + + - name, rdataset... + + - name, rdata... + """ + self._check_ended() + self._check_read_only() + return self._delete(False, args) + + def delete_exact(self, *args): + """Delete records. + + The arguments may be: + + - rrset + + - name + + - name, rdataclass, rdatatype, [covers] + + - name, rdataset... + + - name, rdata... + + Raises dns.transaction.DeleteNotExact if some of the records + are not in the existing set. + + """ + self._check_ended() + self._check_read_only() + return self._delete(True, args) + + def name_exists(self, name): + """Does the specified name exist?""" + self._check_ended() + if isinstance(name, str): + name = dns.name.from_text(name, None) + return self._name_exists(name) + + def update_serial(self, value=1, relative=True, name=dns.name.empty): + """Update the serial number. + + *value*, an `int`, is an increment if *relative* is `True`, or the + actual value to set if *relative* is `False`. + + Raises `KeyError` if there is no SOA rdataset at *name*. + + Raises `ValueError` if *value* is negative or if the increment is + so large that it would cause the new serial to be less than the + prior value. + """ + self._check_ended() + if value < 0: + raise ValueError('negative update_serial() value') + if isinstance(name, str): + name = dns.name.from_text(name, None) + rdataset = self._get_rdataset(name, dns.rdatatype.SOA, + dns.rdatatype.NONE) + if rdataset is None or len(rdataset) == 0: + raise KeyError + if relative: + serial = dns.serial.Serial(rdataset[0].serial) + value + else: + serial = dns.serial.Serial(value) + serial = serial.value # convert back to int + if serial == 0: + serial = 1 + rdata = rdataset[0].replace(serial=serial) + new_rdataset = dns.rdataset.from_rdata(rdataset.ttl, rdata) + self.replace(name, new_rdataset) + + def __iter__(self): + self._check_ended() + return self._iterate_rdatasets() + + def changed(self): + """Has this transaction changed anything? + + For read-only transactions, the result is always `False`. 
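# How the high-level methods above are typically driven (a sketch, not part
# of the vendored file; assumes dnspython >= 2.1, where dns.zone.Zone acts
# as a TransactionManager; names and addresses are illustrative):
import dns.rdata
import dns.zone

zone = dns.zone.from_text(
    'example. 300 IN SOA ns root 1 7200 900 1209600 86400\n'
    'example. 300 IN NS ns\n'
    'www 300 IN A 10.0.0.1\n',
    origin='example.')
with zone.writer() as txn:                    # commits on clean exit
    txn.add('www', 300, dns.rdata.from_text('IN', 'A', '10.0.0.2'))
    txn.update_serial()                       # bump the SOA serial by 1
assert len(zone.find_rdataset('www', 'A')) == 2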
+
+        For writable transactions, the result is `True` if at some time
+        during the life of the transaction, the content was changed.
+        """
+        self._check_ended()
+        return self._changed()
+
+    def commit(self):
+        """Commit the transaction.
+
+        Normally transactions are used as context managers and commit
+        or rollback automatically, but it may be done explicitly if needed.
+        A ``dns.transaction.AlreadyEnded`` exception will be raised if you try
+        to use a transaction after it has been committed or rolled back.
+
+        Raises an exception if the commit fails (in which case the transaction
+        is also rolled back).
+        """
+        self._end(True)
+
+    def rollback(self):
+        """Rollback the transaction.
+
+        Normally transactions are used as context managers and commit
+        or rollback automatically, but it may be done explicitly if needed.
+        A ``dns.transaction.AlreadyEnded`` exception will be raised if you try
+        to use a transaction after it has been committed or rolled back.
+
+        Rollback cannot otherwise fail.
+        """
+        self._end(False)
+
+    def check_put_rdataset(self, check):
+        """Call *check* before putting (storing) an rdataset.
+
+        The function is called with the transaction, the name, and the rdataset.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called.  The check function should raise an exception if it objects to
+        the put, and otherwise should return ``None``.
+        """
+        self._check_put_rdataset.append(check)
+
+    def check_delete_rdataset(self, check):
+        """Call *check* before deleting an rdataset.
+
+        The function is called with the transaction, the name, the rdatatype,
+        and the covered rdatatype.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called.  The check function should raise an exception if it objects to
+        the delete, and otherwise should return ``None``.
+        """
+        self._check_delete_rdataset.append(check)
+
+    def check_delete_name(self, check):
+        """Call *check* before deleting a name.
+
+        The function is called with the transaction and the name.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called.  The check function should raise an exception if it objects to
+        the delete, and otherwise should return ``None``.
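# One of the check hooks above in action: a put-time policy callback that
# rejects TTLs over a day (illustrative; this continues the `zone` and
# dns.rdata sketch shown after changed() above):
def limit_ttl(txn, name, rdataset):
    if rdataset.ttl > 86400:
        raise ValueError(f'{name}: TTL {rdataset.ttl} exceeds policy')

with zone.writer() as txn:
    txn.check_put_rdataset(limit_ttl)
    txn.add('www', 300, dns.rdata.from_text('IN', 'A', '10.0.0.3'))  # passes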
+ """ + self._check_delete_name.append(check) + + # + # Helper methods + # + + def _raise_if_not_empty(self, method, args): + if len(args) != 0: + raise TypeError(f'extra parameters to {method}') + + def _rdataset_from_args(self, method, deleting, args): + try: + arg = args.popleft() + if isinstance(arg, dns.rrset.RRset): + rdataset = arg.to_rdataset() + elif isinstance(arg, dns.rdataset.Rdataset): + rdataset = arg + else: + if deleting: + ttl = 0 + else: + if isinstance(arg, int): + ttl = arg + if ttl > dns.ttl.MAX_TTL: + raise ValueError(f'{method}: TTL value too big') + else: + raise TypeError(f'{method}: expected a TTL') + arg = args.popleft() + if isinstance(arg, dns.rdata.Rdata): + rdataset = dns.rdataset.from_rdata(ttl, arg) + else: + raise TypeError(f'{method}: expected an Rdata') + return rdataset + except IndexError: + if deleting: + return None + else: + # reraise + raise TypeError(f'{method}: expected more arguments') + + def _add(self, replace, args): + try: + args = collections.deque(args) + if replace: + method = 'replace()' + else: + method = 'add()' + arg = args.popleft() + if isinstance(arg, str): + arg = dns.name.from_text(arg, None) + if isinstance(arg, dns.name.Name): + name = arg + rdataset = self._rdataset_from_args(method, False, args) + elif isinstance(arg, dns.rrset.RRset): + rrset = arg + name = rrset.name + # rrsets are also rdatasets, but they don't print the + # same and can't be stored in nodes, so convert. + rdataset = rrset.to_rdataset() + else: + raise TypeError(f'{method} requires a name or RRset ' + + 'as the first argument') + if rdataset.rdclass != self.manager.get_class(): + raise ValueError(f'{method} has objects of wrong RdataClass') + if rdataset.rdtype == dns.rdatatype.SOA: + (_, _, origin) = self._origin_information() + if name != origin: + raise ValueError(f'{method} has non-origin SOA') + self._raise_if_not_empty(method, args) + if not replace: + existing = self._get_rdataset(name, rdataset.rdtype, + rdataset.covers) + if existing is not None: + if isinstance(existing, dns.rdataset.ImmutableRdataset): + trds = dns.rdataset.Rdataset(existing.rdclass, + existing.rdtype, + existing.covers) + trds.update(existing) + existing = trds + rdataset = existing.union(rdataset) + self._checked_put_rdataset(name, rdataset) + except IndexError: + raise TypeError(f'not enough parameters to {method}') + + def _delete(self, exact, args): + try: + args = collections.deque(args) + if exact: + method = 'delete_exact()' + else: + method = 'delete()' + arg = args.popleft() + if isinstance(arg, str): + arg = dns.name.from_text(arg, None) + if isinstance(arg, dns.name.Name): + name = arg + if len(args) > 0 and (isinstance(args[0], int) or + isinstance(args[0], str)): + # deleting by type and (optionally) covers + rdtype = dns.rdatatype.RdataType.make(args.popleft()) + if len(args) > 0: + covers = dns.rdatatype.RdataType.make(args.popleft()) + else: + covers = dns.rdatatype.NONE + self._raise_if_not_empty(method, args) + existing = self._get_rdataset(name, rdtype, covers) + if existing is None: + if exact: + raise DeleteNotExact(f'{method}: missing rdataset') + else: + self._delete_rdataset(name, rdtype, covers) + return + else: + rdataset = self._rdataset_from_args(method, True, args) + elif isinstance(arg, dns.rrset.RRset): + rdataset = arg # rrsets are also rdatasets + name = rdataset.name + else: + raise TypeError(f'{method} requires a name or RRset ' + + 'as the first argument') + self._raise_if_not_empty(method, args) + if rdataset: + if rdataset.rdclass != 
self.manager.get_class(): + raise ValueError(f'{method} has objects of wrong ' + 'RdataClass') + existing = self._get_rdataset(name, rdataset.rdtype, + rdataset.covers) + if existing is not None: + if exact: + intersection = existing.intersection(rdataset) + if intersection != rdataset: + raise DeleteNotExact(f'{method}: missing rdatas') + rdataset = existing.difference(rdataset) + if len(rdataset) == 0: + self._checked_delete_rdataset(name, rdataset.rdtype, + rdataset.covers) + else: + self._checked_put_rdataset(name, rdataset) + elif exact: + raise DeleteNotExact(f'{method}: missing rdataset') + else: + if exact and not self._name_exists(name): + raise DeleteNotExact(f'{method}: name not known') + self._checked_delete_name(name) + except IndexError: + raise TypeError(f'not enough parameters to {method}') + + def _check_ended(self): + if self._ended: + raise AlreadyEnded + + def _end(self, commit): + self._check_ended() + if self._ended: + raise AlreadyEnded + try: + self._end_transaction(commit) + finally: + self._ended = True + + def _checked_put_rdataset(self, name, rdataset): + for check in self._check_put_rdataset: + check(self, name, rdataset) + self._put_rdataset(name, rdataset) + + def _checked_delete_rdataset(self, name, rdtype, covers): + for check in self._check_delete_rdataset: + check(self, name, rdtype, covers) + self._delete_rdataset(name, rdtype, covers) + + def _checked_delete_name(self, name): + for check in self._check_delete_name: + check(self, name) + self._delete_name(name) + + # + # Transactions are context managers. + # + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if not self._ended: + if exc_type is None: + self.commit() + else: + self.rollback() + return False + + # + # This is the low level API, which must be implemented by subclasses + # of Transaction. + # + + def _get_rdataset(self, name, rdtype, covers): + """Return the rdataset associated with *name*, *rdtype*, and *covers*, + or `None` if not found. + """ + raise NotImplementedError # pragma: no cover + + def _put_rdataset(self, name, rdataset): + """Store the rdataset.""" + raise NotImplementedError # pragma: no cover + + def _delete_name(self, name): + """Delete all data associated with *name*. + + It is not an error if the name does not exist. + """ + raise NotImplementedError # pragma: no cover + + def _delete_rdataset(self, name, rdtype, covers): + """Delete all data associated with *name*, *rdtype*, and *covers*. + + It is not an error if the rdataset does not exist. + """ + raise NotImplementedError # pragma: no cover + + def _name_exists(self, name): + """Does name exist? + + Returns a bool. + """ + raise NotImplementedError # pragma: no cover + + def _changed(self): + """Has this transaction changed anything?""" + raise NotImplementedError # pragma: no cover + + def _end_transaction(self, commit): + """End the transaction. + + *commit*, a bool. If ``True``, commit the transaction, otherwise + roll it back. + + If committing and the commit fails, then roll back and raise an + exception. + """ + raise NotImplementedError # pragma: no cover + + def _set_origin(self, origin): + """Set the origin. + + This method is called when reading a possibly relativized + source, and an origin setting operation occurs (e.g. $ORIGIN + in a zone file). + """ + raise NotImplementedError # pragma: no cover + + def _iterate_rdatasets(self): + """Return an iterator that yields (name, rdataset) tuples. 
+ """ + raise NotImplementedError # pragma: no cover + + def _get_node(self, name): + """Return the node at *name*, if any. + + Returns a node or ``None``. + """ + raise NotImplementedError # pragma: no cover + + # + # Low-level API with a default implementation, in case a subclass needs + # to override. + # + + def _origin_information(self): + # This is only used by _add() + return self.manager.origin_information() diff --git a/venv/lib/python3.10/site-packages/dns/tsig.py b/venv/lib/python3.10/site-packages/dns/tsig.py new file mode 100644 index 0000000..50b2d47 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/tsig.py @@ -0,0 +1,346 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS TSIG support.""" + +import base64 +import hashlib +import hmac +import struct + +import dns.exception +import dns.rdataclass +import dns.name +import dns.rcode + +class BadTime(dns.exception.DNSException): + + """The current time is not within the TSIG's validity time.""" + + +class BadSignature(dns.exception.DNSException): + + """The TSIG signature fails to verify.""" + + +class BadKey(dns.exception.DNSException): + + """The TSIG record owner name does not match the key.""" + + +class BadAlgorithm(dns.exception.DNSException): + + """The TSIG algorithm does not match the key.""" + + +class PeerError(dns.exception.DNSException): + + """Base class for all TSIG errors generated by the remote peer""" + + +class PeerBadKey(PeerError): + + """The peer didn't know the key we used""" + + +class PeerBadSignature(PeerError): + + """The peer didn't like the signature we sent""" + + +class PeerBadTime(PeerError): + + """The peer didn't like the time we sent""" + + +class PeerBadTruncation(PeerError): + + """The peer didn't like amount of truncation in the TSIG we sent""" + + +# TSIG Algorithms + +HMAC_MD5 = dns.name.from_text("HMAC-MD5.SIG-ALG.REG.INT") +HMAC_SHA1 = dns.name.from_text("hmac-sha1") +HMAC_SHA224 = dns.name.from_text("hmac-sha224") +HMAC_SHA256 = dns.name.from_text("hmac-sha256") +HMAC_SHA256_128 = dns.name.from_text("hmac-sha256-128") +HMAC_SHA384 = dns.name.from_text("hmac-sha384") +HMAC_SHA384_192 = dns.name.from_text("hmac-sha384-192") +HMAC_SHA512 = dns.name.from_text("hmac-sha512") +HMAC_SHA512_256 = dns.name.from_text("hmac-sha512-256") +GSS_TSIG = dns.name.from_text("gss-tsig") + +default_algorithm = HMAC_SHA256 + + +class GSSTSig: + """ + GSS-TSIG TSIG implementation. This uses the GSS-API context established + in the TKEY message handshake to sign messages using GSS-API message + integrity codes, per the RFC. + + In order to avoid a direct GSSAPI dependency, the keyring holds a ref + to the GSSAPI object required, rather than the key itself. 
+ """ + def __init__(self, gssapi_context): + self.gssapi_context = gssapi_context + self.data = b'' + self.name = 'gss-tsig' + + def update(self, data): + self.data += data + + def sign(self): + # defer to the GSSAPI function to sign + return self.gssapi_context.get_signature(self.data) + + def verify(self, expected): + try: + # defer to the GSSAPI function to verify + return self.gssapi_context.verify_signature(self.data, expected) + except Exception: + # note the usage of a bare exception + raise BadSignature + + +class GSSTSigAdapter: + def __init__(self, keyring): + self.keyring = keyring + + def __call__(self, message, keyname): + if keyname in self.keyring: + key = self.keyring[keyname] + if isinstance(key, Key) and key.algorithm == GSS_TSIG: + if message: + GSSTSigAdapter.parse_tkey_and_step(key, message, keyname) + return key + else: + return None + + @classmethod + def parse_tkey_and_step(cls, key, message, keyname): + # if the message is a TKEY type, absorb the key material + # into the context using step(); this is used to allow the + # client to complete the GSSAPI negotiation before attempting + # to verify the signed response to a TKEY message exchange + try: + rrset = message.find_rrset(message.answer, keyname, + dns.rdataclass.ANY, + dns.rdatatype.TKEY) + if rrset: + token = rrset[0].key + gssapi_context = key.secret + return gssapi_context.step(token) + except KeyError: + pass + + +class HMACTSig: + """ + HMAC TSIG implementation. This uses the HMAC python module to handle the + sign/verify operations. + """ + + _hashes = { + HMAC_SHA1: hashlib.sha1, + HMAC_SHA224: hashlib.sha224, + HMAC_SHA256: hashlib.sha256, + HMAC_SHA256_128: (hashlib.sha256, 128), + HMAC_SHA384: hashlib.sha384, + HMAC_SHA384_192: (hashlib.sha384, 192), + HMAC_SHA512: hashlib.sha512, + HMAC_SHA512_256: (hashlib.sha512, 256), + HMAC_MD5: hashlib.md5, + } + + def __init__(self, key, algorithm): + try: + hashinfo = self._hashes[algorithm] + except KeyError: + raise NotImplementedError(f"TSIG algorithm {algorithm} " + + "is not supported") + + # create the HMAC context + if isinstance(hashinfo, tuple): + self.hmac_context = hmac.new(key, digestmod=hashinfo[0]) + self.size = hashinfo[1] + else: + self.hmac_context = hmac.new(key, digestmod=hashinfo) + self.size = None + self.name = self.hmac_context.name + if self.size: + self.name += f'-{self.size}' + + def update(self, data): + return self.hmac_context.update(data) + + def sign(self): + # defer to the HMAC digest() function for that digestmod + digest = self.hmac_context.digest() + if self.size: + digest = digest[: (self.size // 8)] + return digest + + def verify(self, expected): + # re-digest and compare the results + mac = self.sign() + if not hmac.compare_digest(mac, expected): + raise BadSignature + + +def _digest(wire, key, rdata, time=None, request_mac=None, ctx=None, + multi=None): + """Return a context containing the TSIG rdata for the input parameters + @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object + @raises ValueError: I{other_data} is too long + @raises NotImplementedError: I{algorithm} is not supported + """ + + first = not (ctx and multi) + if first: + ctx = get_context(key) + if request_mac: + ctx.update(struct.pack('!H', len(request_mac))) + ctx.update(request_mac) + ctx.update(struct.pack('!H', rdata.original_id)) + ctx.update(wire[2:]) + if first: + ctx.update(key.name.to_digestable()) + ctx.update(struct.pack('!H', dns.rdataclass.ANY)) + ctx.update(struct.pack('!I', 0)) + if time is None: + time = rdata.time_signed + upper_time = 
(time >> 32) & 0xffff + lower_time = time & 0xffffffff + time_encoded = struct.pack('!HIH', upper_time, lower_time, rdata.fudge) + other_len = len(rdata.other) + if other_len > 65535: + raise ValueError('TSIG Other Data is > 65535 bytes') + if first: + ctx.update(key.algorithm.to_digestable() + time_encoded) + ctx.update(struct.pack('!HH', rdata.error, other_len) + rdata.other) + else: + ctx.update(time_encoded) + return ctx + + +def _maybe_start_digest(key, mac, multi): + """If this is the first message in a multi-message sequence, + start a new context. + @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object + """ + if multi: + ctx = get_context(key) + ctx.update(struct.pack('!H', len(mac))) + ctx.update(mac) + return ctx + else: + return None + + +def sign(wire, key, rdata, time=None, request_mac=None, ctx=None, multi=False): + """Return a (tsig_rdata, mac, ctx) tuple containing the HMAC TSIG rdata + for the input parameters, the HMAC MAC calculated by applying the + TSIG signature algorithm, and the TSIG digest context. + @rtype: (string, dns.tsig.HMACTSig or dns.tsig.GSSTSig object) + @raises ValueError: I{other_data} is too long + @raises NotImplementedError: I{algorithm} is not supported + """ + + ctx = _digest(wire, key, rdata, time, request_mac, ctx, multi) + mac = ctx.sign() + tsig = rdata.replace(time_signed=time, mac=mac) + + return (tsig, _maybe_start_digest(key, mac, multi)) + + +def validate(wire, key, owner, rdata, now, request_mac, tsig_start, ctx=None, + multi=False): + """Validate the specified TSIG rdata against the other input parameters. + + @raises FormError: The TSIG is badly formed. + @raises BadTime: There is too much time skew between the client and the + server. + @raises BadSignature: The TSIG signature did not validate + @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object""" + + (adcount,) = struct.unpack("!H", wire[10:12]) + if adcount == 0: + raise dns.exception.FormError + adcount -= 1 + new_wire = wire[0:10] + struct.pack("!H", adcount) + wire[12:tsig_start] + if rdata.error != 0: + if rdata.error == dns.rcode.BADSIG: + raise PeerBadSignature + elif rdata.error == dns.rcode.BADKEY: + raise PeerBadKey + elif rdata.error == dns.rcode.BADTIME: + raise PeerBadTime + elif rdata.error == dns.rcode.BADTRUNC: + raise PeerBadTruncation + else: + raise PeerError('unknown TSIG error code %d' % rdata.error) + if abs(rdata.time_signed - now) > rdata.fudge: + raise BadTime + if key.name != owner: + raise BadKey + if key.algorithm != rdata.algorithm: + raise BadAlgorithm + ctx = _digest(new_wire, key, rdata, None, request_mac, ctx, multi) + ctx.verify(rdata.mac) + return _maybe_start_digest(key, rdata.mac, multi) + + +def get_context(key): + """Returns an HMAC context for the specified key. 
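# The primitives above are usually reached through dns.message rather than
# called directly; a signing sketch (an aside, assuming dnspython is
# installed; the key name and base64 secret are illustrative):
import dns.message
import dns.tsig
import dns.tsigkeyring

keyring = dns.tsigkeyring.from_text({'example-key.': 'c2VjcmV0c2VjcmV0'})
q = dns.message.make_query('www.example.', 'A')
q.use_tsig(keyring, keyname='example-key.', algorithm=dns.tsig.HMAC_SHA256)
wire = q.to_wire()   # sign() above runs while rendering the message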
+
+    @rtype: HMAC context
+    @raises NotImplementedError: I{algorithm} is not supported
+    """
+
+    if key.algorithm == GSS_TSIG:
+        return GSSTSig(key.secret)
+    else:
+        return HMACTSig(key.secret, key.algorithm)
+
+
+class Key:
+    def __init__(self, name, secret, algorithm=default_algorithm):
+        if isinstance(name, str):
+            name = dns.name.from_text(name)
+        self.name = name
+        if isinstance(secret, str):
+            secret = base64.decodebytes(secret.encode())
+        self.secret = secret
+        if isinstance(algorithm, str):
+            algorithm = dns.name.from_text(algorithm)
+        self.algorithm = algorithm
+
+    def __eq__(self, other):
+        return (isinstance(other, Key) and
+                self.name == other.name and
+                self.secret == other.secret and
+                self.algorithm == other.algorithm)
+
+    def __repr__(self):
+        r = f"<DNS key name='{self.name}', " + \
+            f"algorithm='{self.algorithm}'"
+        if self.algorithm != GSS_TSIG:
+            r += f", secret='{base64.b64encode(self.secret).decode()}'"
+        r += '>'
+        return r
diff --git a/venv/lib/python3.10/site-packages/dns/tsigkeyring.pyi b/venv/lib/python3.10/site-packages/dns/tsigkeyring.pyi
+def from_text(textring : Dict[str, str]) -> Dict[name.Name,bytes]:
+    ...
+def to_text(keyring : Dict[name.Name,bytes]) -> Dict[str, str]:
+    ...
diff --git a/venv/lib/python3.10/site-packages/dns/ttl.py b/venv/lib/python3.10/site-packages/dns/ttl.py
new file mode 100644
index 0000000..df92b2b
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dns/ttl.py
@@ -0,0 +1,90 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""DNS TTL conversion."""
+
+import dns.exception
+
+# Technically TTLs are supposed to be between 0 and 2**31 - 1, with values
+# greater than that interpreted as 0, but we do not impose this policy here
+# as values > 2**31 - 1 occur in real world data.
+#
+# We leave it to applications to impose tighter bounds if desired.
+MAX_TTL = 2**32 - 1
+
+
+class BadTTL(dns.exception.SyntaxError):
+    """DNS TTL value is not well-formed."""
+
+
+def from_text(text):
+    """Convert the text form of a TTL to an integer.
+
+    The BIND 8 units syntax for TTLs (e.g. '1w6d4h3m10s') is supported.
+
+    *text*, a ``str``, the textual TTL.
+
+    Raises ``dns.ttl.BadTTL`` if the TTL is not well-formed.
+
+    Returns an ``int``.
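# The BIND 8 units syntax above, worked through (an aside; assumes dnspython
# is importable): 1w6d4h3m10s = 604800 + 6*86400 + 4*3600 + 3*60 + 10 seconds.
import dns.ttl

assert dns.ttl.from_text('1w6d4h3m10s') == 604800 + 6*86400 + 4*3600 + 3*60 + 10
assert dns.ttl.from_text('3600') == 3600   # bare digit strings pass through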
+ """ + + if text.isdigit(): + total = int(text) + elif len(text) == 0: + raise BadTTL + else: + total = 0 + current = 0 + need_digit = True + for c in text: + if c.isdigit(): + current *= 10 + current += int(c) + need_digit = False + else: + if need_digit: + raise BadTTL + c = c.lower() + if c == 'w': + total += current * 604800 + elif c == 'd': + total += current * 86400 + elif c == 'h': + total += current * 3600 + elif c == 'm': + total += current * 60 + elif c == 's': + total += current + else: + raise BadTTL("unknown unit '%s'" % c) + current = 0 + need_digit = True + if not current == 0: + raise BadTTL("trailing integer") + if total < 0 or total > MAX_TTL: + raise BadTTL("TTL should be between 0 and 2**32 - 1 (inclusive)") + return total + + +def make(value): + if isinstance(value, int): + return value + elif isinstance(value, str): + return dns.ttl.from_text(value) + else: + raise ValueError('cannot convert value to TTL') diff --git a/venv/lib/python3.10/site-packages/dns/update.py b/venv/lib/python3.10/site-packages/dns/update.py new file mode 100644 index 0000000..a541af2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/update.py @@ -0,0 +1,319 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Dynamic Update Support""" + + +import dns.message +import dns.name +import dns.opcode +import dns.rdata +import dns.rdataclass +import dns.rdataset +import dns.tsig + + +class UpdateSection(dns.enum.IntEnum): + """Update sections""" + ZONE = 0 + PREREQ = 1 + UPDATE = 2 + ADDITIONAL = 3 + + @classmethod + def _maximum(cls): + return 3 + + +class UpdateMessage(dns.message.Message): + + _section_enum = UpdateSection + + def __init__(self, zone=None, rdclass=dns.rdataclass.IN, keyring=None, + keyname=None, keyalgorithm=dns.tsig.default_algorithm, + id=None): + """Initialize a new DNS Update object. + + See the documentation of the Message class for a complete + description of the keyring dictionary. + + *zone*, a ``dns.name.Name``, ``str``, or ``None``, the zone + which is being updated. ``None`` should only be used by dnspython's + message constructors, as a zone is required for the convenience + methods like ``add()``, ``replace()``, etc. + + *rdclass*, an ``int`` or ``str``, the class of the zone. + + The *keyring*, *keyname*, and *keyalgorithm* parameters are passed to + ``use_tsig()``; see its documentation for details. 
+ """ + super().__init__(id=id) + self.flags |= dns.opcode.to_flags(dns.opcode.UPDATE) + if isinstance(zone, str): + zone = dns.name.from_text(zone) + self.origin = zone + rdclass = dns.rdataclass.RdataClass.make(rdclass) + self.zone_rdclass = rdclass + if self.origin: + self.find_rrset(self.zone, self.origin, rdclass, dns.rdatatype.SOA, + create=True, force_unique=True) + if keyring is not None: + self.use_tsig(keyring, keyname, algorithm=keyalgorithm) + + @property + def zone(self): + """The zone section.""" + return self.sections[0] + + @zone.setter + def zone(self, v): + self.sections[0] = v + + @property + def prerequisite(self): + """The prerequisite section.""" + return self.sections[1] + + @prerequisite.setter + def prerequisite(self, v): + self.sections[1] = v + + @property + def update(self): + """The update section.""" + return self.sections[2] + + @update.setter + def update(self, v): + self.sections[2] = v + + def _add_rr(self, name, ttl, rd, deleting=None, section=None): + """Add a single RR to the update section.""" + + if section is None: + section = self.update + covers = rd.covers() + rrset = self.find_rrset(section, name, self.zone_rdclass, rd.rdtype, + covers, deleting, True, True) + rrset.add(rd, ttl) + + def _add(self, replace, section, name, *args): + """Add records. + + *replace* is the replacement mode. If ``False``, + RRs are added to an existing RRset; if ``True``, the RRset + is replaced with the specified contents. The second + argument is the section to add to. The third argument + is always a name. The other arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if isinstance(args[0], dns.rdataset.Rdataset): + for rds in args: + if replace: + self.delete(name, rds.rdtype) + for rd in rds: + self._add_rr(name, rds.ttl, rd, section=section) + else: + args = list(args) + ttl = int(args.pop(0)) + if isinstance(args[0], dns.rdata.Rdata): + if replace: + self.delete(name, args[0].rdtype) + for rd in args: + self._add_rr(name, ttl, rd, section=section) + else: + rdtype = dns.rdatatype.RdataType.make(args.pop(0)) + if replace: + self.delete(name, rdtype) + for s in args: + rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s, + self.origin) + self._add_rr(name, ttl, rd, section=section) + + def add(self, name, *args): + """Add records. + + The first argument is always a name. The other + arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + """ + + self._add(False, self.update, name, *args) + + def delete(self, name, *args): + """Delete records. + + The first argument is always a name. The other + arguments can be: + + - *empty* + + - rdataset... + + - rdata... + + - rdtype, [string...] 
+ """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if len(args) == 0: + self.find_rrset(self.update, name, dns.rdataclass.ANY, + dns.rdatatype.ANY, dns.rdatatype.NONE, + dns.rdatatype.ANY, True, True) + elif isinstance(args[0], dns.rdataset.Rdataset): + for rds in args: + for rd in rds: + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + else: + args = list(args) + if isinstance(args[0], dns.rdata.Rdata): + for rd in args: + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + else: + rdtype = dns.rdatatype.RdataType.make(args.pop(0)) + if len(args) == 0: + self.find_rrset(self.update, name, + self.zone_rdclass, rdtype, + dns.rdatatype.NONE, + dns.rdataclass.ANY, + True, True) + else: + for s in args: + rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s, + self.origin) + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + + def replace(self, name, *args): + """Replace records. + + The first argument is always a name. The other + arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + + Note that if you want to replace the entire node, you should do + a delete of the name followed by one or more calls to add. + """ + + self._add(True, self.update, name, *args) + + def present(self, name, *args): + """Require that an owner name (and optionally an rdata type, + or specific rdataset) exists as a prerequisite to the + execution of the update. + + The first argument is always a name. + The other arguments can be: + + - rdataset... + + - rdata... + + - rdtype, string... + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if len(args) == 0: + self.find_rrset(self.prerequisite, name, + dns.rdataclass.ANY, dns.rdatatype.ANY, + dns.rdatatype.NONE, None, + True, True) + elif isinstance(args[0], dns.rdataset.Rdataset) or \ + isinstance(args[0], dns.rdata.Rdata) or \ + len(args) > 1: + if not isinstance(args[0], dns.rdataset.Rdataset): + # Add a 0 TTL + args = list(args) + args.insert(0, 0) + self._add(False, self.prerequisite, name, *args) + else: + rdtype = dns.rdatatype.RdataType.make(args[0]) + self.find_rrset(self.prerequisite, name, + dns.rdataclass.ANY, rdtype, + dns.rdatatype.NONE, None, + True, True) + + def absent(self, name, rdtype=None): + """Require that an owner name (and optionally an rdata type) does + not exist as a prerequisite to the execution of the update.""" + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if rdtype is None: + self.find_rrset(self.prerequisite, name, + dns.rdataclass.NONE, dns.rdatatype.ANY, + dns.rdatatype.NONE, None, + True, True) + else: + rdtype = dns.rdatatype.RdataType.make(rdtype) + self.find_rrset(self.prerequisite, name, + dns.rdataclass.NONE, rdtype, + dns.rdatatype.NONE, None, + True, True) + + def _get_one_rr_per_rrset(self, value): + # Updates are always one_rr_per_rrset + return True + + def _parse_rr_header(self, section, name, rdclass, rdtype): + deleting = None + empty = False + if section == UpdateSection.ZONE: + if dns.rdataclass.is_metaclass(rdclass) or \ + rdtype != dns.rdatatype.SOA or \ + self.zone: + raise dns.exception.FormError + else: + if not self.zone: + raise dns.exception.FormError + if rdclass in (dns.rdataclass.ANY, dns.rdataclass.NONE): + deleting = rdclass + rdclass = self.zone[0].rdclass + empty = (deleting == dns.rdataclass.ANY or + section == UpdateSection.PREREQ) + return (rdclass, rdtype, deleting, empty) + +# backwards compatibility +Update = UpdateMessage + +### BEGIN generated UpdateSection constants + +ZONE = 
UpdateSection.ZONE +PREREQ = UpdateSection.PREREQ +UPDATE = UpdateSection.UPDATE +ADDITIONAL = UpdateSection.ADDITIONAL + +### END generated UpdateSection constants diff --git a/venv/lib/python3.10/site-packages/dns/update.pyi b/venv/lib/python3.10/site-packages/dns/update.pyi new file mode 100644 index 0000000..eeac059 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/update.pyi @@ -0,0 +1,21 @@ +from typing import Optional,Dict,Union,Any + +from . import message, tsig, rdataclass, name + +class Update(message.Message): + def __init__(self, zone : Union[name.Name, str], rdclass : Union[int,str] = rdataclass.IN, keyring : Optional[Dict[name.Name,bytes]] = None, + keyname : Optional[name.Name] = None, keyalgorithm : Optional[name.Name] = tsig.default_algorithm) -> None: + self.id : int + def add(self, name : Union[str,name.Name], *args : Any): + ... + def delete(self, name, *args : Any): + ... + def replace(self, name : Union[str,name.Name], *args : Any): + ... + def present(self, name : Union[str,name.Name], *args : Any): + ... + def absent(self, name : Union[str,name.Name], rdtype=None): + """Require that an owner name (and optionally an rdata type) does + not exist as a prerequisite to the execution of the update.""" + def to_wire(self, origin : Optional[name.Name] = None, max_size=65535, **kw) -> bytes: + ... diff --git a/venv/lib/python3.10/site-packages/dns/version.py b/venv/lib/python3.10/site-packages/dns/version.py new file mode 100644 index 0000000..6501787 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/version.py @@ -0,0 +1,46 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""dnspython release version information.""" + +#: MAJOR +MAJOR = 2 +#: MINOR +MINOR = 2 +#: MICRO +MICRO = 1 +#: RELEASELEVEL +RELEASELEVEL = 0x0f +#: SERIAL +SERIAL = 0 + +if RELEASELEVEL == 0x0f: # pragma: no cover + #: version + version = '%d.%d.%d' % (MAJOR, MINOR, MICRO) +elif RELEASELEVEL == 0x00: # pragma: no cover + version = '%d.%d.%ddev%d' % \ + (MAJOR, MINOR, MICRO, SERIAL) +elif RELEASELEVEL == 0x0c: # pragma: no cover + version = '%d.%d.%drc%d' % \ + (MAJOR, MINOR, MICRO, SERIAL) +else: # pragma: no cover + version = '%d.%d.%d%x%d' % \ + (MAJOR, MINOR, MICRO, RELEASELEVEL, SERIAL) + +#: hexversion +hexversion = MAJOR << 24 | MINOR << 16 | MICRO << 8 | RELEASELEVEL << 4 | \ + SERIAL diff --git a/venv/lib/python3.10/site-packages/dns/versioned.py b/venv/lib/python3.10/site-packages/dns/versioned.py new file mode 100644 index 0000000..8b6c275 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/versioned.py @@ -0,0 +1,274 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""DNS Versioned Zones.""" + +import collections +try: + import threading as _threading +except ImportError: # pragma: no cover + import dummy_threading as _threading # type: ignore + +import dns.exception +import dns.immutable +import dns.name +import dns.rdataclass +import dns.rdatatype +import dns.rdtypes.ANY.SOA +import dns.zone + + +class UseTransaction(dns.exception.DNSException): + """To alter a versioned zone, use a transaction.""" + + +# Backwards compatibility +Node = dns.zone.VersionedNode +ImmutableNode = dns.zone.ImmutableVersionedNode +Version = dns.zone.Version +WritableVersion = dns.zone.WritableVersion +ImmutableVersion = dns.zone.ImmutableVersion +Transaction = dns.zone.Transaction + + +class Zone(dns.zone.Zone): + + __slots__ = ['_versions', '_versions_lock', '_write_txn', + '_write_waiters', '_write_event', '_pruning_policy', + '_readers'] + + node_factory = Node + + def __init__(self, origin, rdclass=dns.rdataclass.IN, relativize=True, + pruning_policy=None): + """Initialize a versioned zone object. + + *origin* is the origin of the zone. It may be a ``dns.name.Name``, + a ``str``, or ``None``. If ``None``, then the zone's origin will + be set by the first ``$ORIGIN`` line in a zone file. + + *rdclass*, an ``int``, the zone's rdata class; the default is class IN. + + *relativize*, a ``bool``, determine's whether domain names are + relativized to the zone's origin. The default is ``True``. + + *pruning policy*, a function taking a `Version` and returning + a `bool`, or `None`. Should the version be pruned? If `None`, + the default policy, which retains one version is used. 
+ """ + super().__init__(origin, rdclass, relativize) + self._versions = collections.deque() + self._version_lock = _threading.Lock() + if pruning_policy is None: + self._pruning_policy = self._default_pruning_policy + else: + self._pruning_policy = pruning_policy + self._write_txn = None + self._write_event = None + self._write_waiters = collections.deque() + self._readers = set() + self._commit_version_unlocked(None, + WritableVersion(self, replacement=True), + origin) + + def reader(self, id=None, serial=None): # pylint: disable=arguments-differ + if id is not None and serial is not None: + raise ValueError('cannot specify both id and serial') + with self._version_lock: + if id is not None: + version = None + for v in reversed(self._versions): + if v.id == id: + version = v + break + if version is None: + raise KeyError('version not found') + elif serial is not None: + if self.relativize: + oname = dns.name.empty + else: + oname = self.origin + version = None + for v in reversed(self._versions): + n = v.nodes.get(oname) + if n: + rds = n.get_rdataset(self.rdclass, dns.rdatatype.SOA) + if rds and rds[0].serial == serial: + version = v + break + if version is None: + raise KeyError('serial not found') + else: + version = self._versions[-1] + txn = Transaction(self, False, version) + self._readers.add(txn) + return txn + + def writer(self, replacement=False): + event = None + while True: + with self._version_lock: + # Checking event == self._write_event ensures that either + # no one was waiting before we got lucky and found no write + # txn, or we were the one who was waiting and got woken up. + # This prevents "taking cuts" when creating a write txn. + if self._write_txn is None and event == self._write_event: + # Creating the transaction defers version setup + # (i.e. copying the nodes dictionary) until we + # give up the lock, so that we hold the lock as + # short a time as possible. This is why we call + # _setup_version() below. + self._write_txn = Transaction(self, replacement, + make_immutable=True) + # give up our exclusive right to make a Transaction + self._write_event = None + break + # Someone else is writing already, so we will have to + # wait, but we want to do the actual wait outside the + # lock. + event = _threading.Event() + self._write_waiters.append(event) + # wait (note we gave up the lock!) + # + # We only wake one sleeper at a time, so it's important + # that no event waiter can exit this method (e.g. via + # cancellation) without returning a transaction or waking + # someone else up. + # + # This is not a problem with Threading module threads as + # they cannot be canceled, but could be an issue with trio + # or curio tasks when we do the async version of writer(). + # I.e. we'd need to do something like: + # + # try: + # event.wait() + # except trio.Cancelled: + # with self._version_lock: + # self._maybe_wakeup_one_waiter_unlocked() + # raise + # + event.wait() + # Do the deferred version setup. + self._write_txn._setup_version() + return self._write_txn + + def _maybe_wakeup_one_waiter_unlocked(self): + if len(self._write_waiters) > 0: + self._write_event = self._write_waiters.popleft() + self._write_event.set() + + # pylint: disable=unused-argument + def _default_pruning_policy(self, zone, version): + return True + # pylint: enable=unused-argument + + def _prune_versions_unlocked(self): + assert len(self._versions) > 0 + # Don't ever prune a version greater than or equal to one that + # a reader has open. 
This pins versions in memory while the + # reader is open, and importantly lets the reader open a txn on + # a successor version (e.g. if generating an IXFR). + # + # Note our definition of least_kept also ensures we do not try to + # delete the greatest version. + if len(self._readers) > 0: + least_kept = min(txn.version.id for txn in self._readers) + else: + least_kept = self._versions[-1].id + while self._versions[0].id < least_kept and \ + self._pruning_policy(self, self._versions[0]): + self._versions.popleft() + + def set_max_versions(self, max_versions): + """Set a pruning policy that retains up to the specified number + of versions + """ + if max_versions is not None and max_versions < 1: + raise ValueError('max versions must be at least 1') + if max_versions is None: + def policy(*_): + return False + else: + def policy(zone, _): + return len(zone._versions) > max_versions + self.set_pruning_policy(policy) + + def set_pruning_policy(self, policy): + """Set the pruning policy for the zone. + + The *policy* function takes a `Version` and returns `True` if + the version should be pruned, and `False` otherwise. `None` + may also be specified for policy, in which case the default policy + is used. + + Pruning checking proceeds from the least version and the first + time the function returns `False`, the checking stops. I.e. the + retained versions are always a consecutive sequence. + """ + if policy is None: + policy = self._default_pruning_policy + with self._version_lock: + self._pruning_policy = policy + self._prune_versions_unlocked() + + def _end_read(self, txn): + with self._version_lock: + self._readers.remove(txn) + self._prune_versions_unlocked() + + def _end_write_unlocked(self, txn): + assert self._write_txn == txn + self._write_txn = None + self._maybe_wakeup_one_waiter_unlocked() + + def _end_write(self, txn): + with self._version_lock: + self._end_write_unlocked(txn) + + def _commit_version_unlocked(self, txn, version, origin): + self._versions.append(version) + self._prune_versions_unlocked() + self.nodes = version.nodes + if self.origin is None: + self.origin = origin + # txn can be None in __init__ when we make the empty version. 
+ if txn is not None: + self._end_write_unlocked(txn) + + def _commit_version(self, txn, version, origin): + with self._version_lock: + self._commit_version_unlocked(txn, version, origin) + + def _get_next_version_id(self): + if len(self._versions) > 0: + id = self._versions[-1].id + 1 + else: + id = 1 + return id + + def find_node(self, name, create=False): + if create: + raise UseTransaction + return super().find_node(name) + + def delete_node(self, name): + raise UseTransaction + + def find_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE, + create=False): + if create: + raise UseTransaction + rdataset = super().find_rdataset(name, rdtype, covers) + return dns.rdataset.ImmutableRdataset(rdataset) + + def get_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE, + create=False): + if create: + raise UseTransaction + rdataset = super().get_rdataset(name, rdtype, covers) + return dns.rdataset.ImmutableRdataset(rdataset) + + def delete_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE): + raise UseTransaction + + def replace_rdataset(self, name, replacement): + raise UseTransaction diff --git a/venv/lib/python3.10/site-packages/dns/win32util.py b/venv/lib/python3.10/site-packages/dns/win32util.py new file mode 100644 index 0000000..745317a --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/win32util.py @@ -0,0 +1,235 @@ +import sys + +if sys.platform == 'win32': + + import dns.name + + _prefer_wmi = True + + import winreg + + try: + try: + import threading as _threading + except ImportError: # pragma: no cover + import dummy_threading as _threading # type: ignore + import pythoncom + import wmi + _have_wmi = True + except Exception: + _have_wmi = False + + def _config_domain(domain): + # Sometimes DHCP servers add a '.' prefix to the default domain, and + # Windows just stores such values in the registry (see #687). + # Check for this and fix it. + if domain.startswith('.'): + domain = domain[1:] + return dns.name.from_text(domain) + + class DnsInfo: + def __init__(self): + self.domain = None + self.nameservers = [] + self.search = [] + + if _have_wmi: + class _WMIGetter(_threading.Thread): + def __init__(self): + super().__init__() + self.info = DnsInfo() + + def run(self): + pythoncom.CoInitialize() + try: + system = wmi.WMI() + for interface in system.Win32_NetworkAdapterConfiguration(): + if interface.IPEnabled: + self.info.domain = _config_domain(interface.DNSDomain) + self.info.nameservers = list(interface.DNSServerSearchOrder) + self.info.search = [dns.name.from_text(x) for x in + interface.DNSDomainSuffixSearchOrder] + break + finally: + pythoncom.CoUninitialize() + + def get(self): + # We always run in a separate thread to avoid any issues with + # the COM threading model. + self.start() + self.join() + return self.info + else: + class _WMIGetter: + pass + + + class _RegistryGetter: + def __init__(self): + self.info = DnsInfo() + + def _determine_split_char(self, entry): + # + # The windows registry irritatingly changes the list element + # delimiter in between ' ' and ',' (and vice-versa) in various + # versions of windows. + # + if entry.find(' ') >= 0: + split_char = ' ' + elif entry.find(',') >= 0: + split_char = ',' + else: + # probably a singleton; treat as a space-separated list. 
+ split_char = ' ' + return split_char + + def _config_nameservers(self, nameservers): + split_char = self._determine_split_char(nameservers) + ns_list = nameservers.split(split_char) + for ns in ns_list: + if ns not in self.info.nameservers: + self.info.nameservers.append(ns) + + def _config_search(self, search): + split_char = self._determine_split_char(search) + search_list = search.split(split_char) + for s in search_list: + s = dns.name.from_text(s) + if s not in self.info.search: + self.info.search.append(s) + + def _config_fromkey(self, key, always_try_domain): + try: + servers, _ = winreg.QueryValueEx(key, 'NameServer') + except WindowsError: + servers = None + if servers: + self._config_nameservers(servers) + if servers or always_try_domain: + try: + dom, _ = winreg.QueryValueEx(key, 'Domain') + if dom: + self.info.domain = _config_domain(dom) + except WindowsError: + pass + else: + try: + servers, _ = winreg.QueryValueEx(key, 'DhcpNameServer') + except WindowsError: + servers = None + if servers: + self._config_nameservers(servers) + try: + dom, _ = winreg.QueryValueEx(key, 'DhcpDomain') + if dom: + self.info.domain = _config_domain(dom) + except WindowsError: + pass + try: + search, _ = winreg.QueryValueEx(key, 'SearchList') + except WindowsError: + search = None + if search is None: + try: + search, _ = winreg.QueryValueEx(key, 'DhcpSearchList') + except WindowsError: + search = None + if search: + self._config_search(search) + + def _is_nic_enabled(self, lm, guid): + # Look in the Windows Registry to determine whether the network + # interface corresponding to the given guid is enabled. + # + # (Code contributed by Paul Marks, thanks!) + # + try: + # This hard-coded location seems to be consistent, at least + # from Windows 2000 through Vista. + connection_key = winreg.OpenKey( + lm, + r'SYSTEM\CurrentControlSet\Control\Network' + r'\{4D36E972-E325-11CE-BFC1-08002BE10318}' + r'\%s\Connection' % guid) + + try: + # The PnpInstanceID points to a key inside Enum + (pnp_id, ttype) = winreg.QueryValueEx( + connection_key, 'PnpInstanceID') + + if ttype != winreg.REG_SZ: + raise ValueError # pragma: no cover + + device_key = winreg.OpenKey( + lm, r'SYSTEM\CurrentControlSet\Enum\%s' % pnp_id) + + try: + # Get ConfigFlags for this device + (flags, ttype) = winreg.QueryValueEx( + device_key, 'ConfigFlags') + + if ttype != winreg.REG_DWORD: + raise ValueError # pragma: no cover + + # Based on experimentation, bit 0x1 indicates that the + # device is disabled. + # + # XXXRTH I suspect we really want to & with 0x03 so + # that CONFIGFLAGS_REMOVED devices are also ignored, + # but we're shifting to WMI as ConfigFlags is not + # supposed to be used. 
+ return not flags & 0x1 + + finally: + device_key.Close() + finally: + connection_key.Close() + except Exception: # pragma: no cover + return False + + def get(self): + """Extract resolver configuration from the Windows registry.""" + + lm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + tcp_params = winreg.OpenKey(lm, + r'SYSTEM\CurrentControlSet' + r'\Services\Tcpip\Parameters') + try: + self._config_fromkey(tcp_params, True) + finally: + tcp_params.Close() + interfaces = winreg.OpenKey(lm, + r'SYSTEM\CurrentControlSet' + r'\Services\Tcpip\Parameters' + r'\Interfaces') + try: + i = 0 + while True: + try: + guid = winreg.EnumKey(interfaces, i) + i += 1 + key = winreg.OpenKey(interfaces, guid) + try: + if not self._is_nic_enabled(lm, guid): + continue + self._config_fromkey(key, False) + finally: + key.Close() + except EnvironmentError: + break + finally: + interfaces.Close() + finally: + lm.Close() + return self.info + + if _have_wmi and _prefer_wmi: + _getter_class = _WMIGetter + else: + _getter_class = _RegistryGetter + + def get_dns_info(): + """Extract resolver configuration.""" + getter = _getter_class() + return getter.get() diff --git a/venv/lib/python3.10/site-packages/dns/wire.py b/venv/lib/python3.10/site-packages/dns/wire.py new file mode 100644 index 0000000..572e27e --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/wire.py @@ -0,0 +1,85 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import contextlib +import struct + +import dns.exception +import dns.name + +class Parser: + def __init__(self, wire, current=0): + self.wire = wire + self.current = 0 + self.end = len(self.wire) + if current: + self.seek(current) + self.furthest = current + + def remaining(self): + return self.end - self.current + + def get_bytes(self, size): + if size > self.remaining(): + raise dns.exception.FormError + output = self.wire[self.current:self.current + size] + self.current += size + self.furthest = max(self.furthest, self.current) + return output + + def get_counted_bytes(self, length_size=1): + length = int.from_bytes(self.get_bytes(length_size), 'big') + return self.get_bytes(length) + + def get_remaining(self): + return self.get_bytes(self.remaining()) + + def get_uint8(self): + return struct.unpack('!B', self.get_bytes(1))[0] + + def get_uint16(self): + return struct.unpack('!H', self.get_bytes(2))[0] + + def get_uint32(self): + return struct.unpack('!I', self.get_bytes(4))[0] + + def get_uint48(self): + return int.from_bytes(self.get_bytes(6), 'big') + + def get_struct(self, format): + return struct.unpack(format, self.get_bytes(struct.calcsize(format))) + + def get_name(self, origin=None): + name = dns.name.from_wire_parser(self) + if origin: + name = name.relativize(origin) + return name + + def seek(self, where): + # Note that seeking to the end is OK! (If you try to read + # after such a seek, you'll get an exception as expected.) + if where < 0 or where > self.end: + raise dns.exception.FormError + self.current = where + + @contextlib.contextmanager + def restrict_to(self, size): + if size > self.remaining(): + raise dns.exception.FormError + saved_end = self.end + try: + self.end = self.current + size + yield + # We make this check here and not in the finally as we + # don't want to raise if we're already raising for some + # other reason. 
+ if self.current != self.end: + raise dns.exception.FormError + finally: + self.end = saved_end + + @contextlib.contextmanager + def restore_furthest(self): + try: + yield None + finally: + self.current = self.furthest diff --git a/venv/lib/python3.10/site-packages/dns/xfr.py b/venv/lib/python3.10/site-packages/dns/xfr.py new file mode 100644 index 0000000..cf9a163 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/xfr.py @@ -0,0 +1,313 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.message +import dns.name +import dns.rcode +import dns.serial +import dns.rdatatype +import dns.zone + + +class TransferError(dns.exception.DNSException): + """A zone transfer response got a non-zero rcode.""" + + def __init__(self, rcode): + message = 'Zone transfer error: %s' % dns.rcode.to_text(rcode) + super().__init__(message) + self.rcode = rcode + + +class SerialWentBackwards(dns.exception.FormError): + """The current serial number is less than the serial we know.""" + + +class UseTCP(dns.exception.DNSException): + """This IXFR cannot be completed with UDP.""" + + +class Inbound: + """ + State machine for zone transfers. + """ + + def __init__(self, txn_manager, rdtype=dns.rdatatype.AXFR, + serial=None, is_udp=False): + """Initialize an inbound zone transfer. + + *txn_manager* is a :py:class:`dns.transaction.TransactionManager`. + + *rdtype* can be `dns.rdatatype.AXFR` or `dns.rdatatype.IXFR` + + *serial* is the base serial number for IXFRs, and is required in + that case. + + *is_udp*, a ``bool`` indidicates if UDP is being used for this + XFR. + """ + self.txn_manager = txn_manager + self.txn = None + self.rdtype = rdtype + if rdtype == dns.rdatatype.IXFR: + if serial is None: + raise ValueError('a starting serial must be supplied for IXFRs') + elif is_udp: + raise ValueError('is_udp specified for AXFR') + self.serial = serial + self.is_udp = is_udp + (_, _, self.origin) = txn_manager.origin_information() + self.soa_rdataset = None + self.done = False + self.expecting_SOA = False + self.delete_mode = False + + def process_message(self, message): + """Process one message in the transfer. + + The message should have the same relativization as was specified when + the `dns.xfr.Inbound` was created. The message should also have been + created with `one_rr_per_rrset=True` because order matters. + + Returns `True` if the transfer is complete, and `False` otherwise. 
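Returning briefly to ``dns.wire.Parser`` defined above: it is usable on its own, and a tiny sketch shows the core calls:

    import dns.wire

    parser = dns.wire.Parser(b'\x00\x2a\x03foo')
    assert parser.get_uint16() == 42             # two big-endian bytes
    assert parser.get_counted_bytes() == b'foo'  # 1-byte length, then payload
    assert parser.remaining() == 0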
+ """ + if self.txn is None: + replacement = self.rdtype == dns.rdatatype.AXFR + self.txn = self.txn_manager.writer(replacement) + rcode = message.rcode() + if rcode != dns.rcode.NOERROR: + raise TransferError(rcode) + # + # We don't require a question section, but if it is present is + # should be correct. + # + if len(message.question) > 0: + if message.question[0].name != self.origin: + raise dns.exception.FormError("wrong question name") + if message.question[0].rdtype != self.rdtype: + raise dns.exception.FormError("wrong question rdatatype") + answer_index = 0 + if self.soa_rdataset is None: + # + # This is the first message. We're expecting an SOA at + # the origin. + # + if not message.answer or message.answer[0].name != self.origin: + raise dns.exception.FormError("No answer or RRset not " + "for zone origin") + rrset = message.answer[0] + name = rrset.name + rdataset = rrset + if rdataset.rdtype != dns.rdatatype.SOA: + raise dns.exception.FormError("first RRset is not an SOA") + answer_index = 1 + self.soa_rdataset = rdataset.copy() + if self.rdtype == dns.rdatatype.IXFR: + if self.soa_rdataset[0].serial == self.serial: + # + # We're already up-to-date. + # + self.done = True + elif dns.serial.Serial(self.soa_rdataset[0].serial) < \ + self.serial: + # It went backwards! + raise SerialWentBackwards + else: + if self.is_udp and len(message.answer[answer_index:]) == 0: + # + # There are no more records, so this is the + # "truncated" response. Say to use TCP + # + raise UseTCP + # + # Note we're expecting another SOA so we can detect + # if this IXFR response is an AXFR-style response. + # + self.expecting_SOA = True + # + # Process the answer section (other than the initial SOA in + # the first message). + # + for rrset in message.answer[answer_index:]: + name = rrset.name + rdataset = rrset + if self.done: + raise dns.exception.FormError("answers after final SOA") + if rdataset.rdtype == dns.rdatatype.SOA and \ + name == self.origin: + # + # Every time we see an origin SOA delete_mode inverts + # + if self.rdtype == dns.rdatatype.IXFR: + self.delete_mode = not self.delete_mode + # + # If this SOA Rdataset is equal to the first we saw + # then we're finished. If this is an IXFR we also + # check that we're seeing the record in the expected + # part of the response. + # + if rdataset == self.soa_rdataset and \ + (self.rdtype == dns.rdatatype.AXFR or + (self.rdtype == dns.rdatatype.IXFR and + self.delete_mode)): + # + # This is the final SOA + # + if self.expecting_SOA: + # We got an empty IXFR sequence! + raise dns.exception.FormError('empty IXFR sequence') + if self.rdtype == dns.rdatatype.IXFR \ + and self.serial != rdataset[0].serial: + raise dns.exception.FormError('unexpected end of IXFR ' + 'sequence') + self.txn.replace(name, rdataset) + self.txn.commit() + self.txn = None + self.done = True + else: + # + # This is not the final SOA + # + self.expecting_SOA = False + if self.rdtype == dns.rdatatype.IXFR: + if self.delete_mode: + # This is the start of an IXFR deletion set + if rdataset[0].serial != self.serial: + raise dns.exception.FormError( + "IXFR base serial mismatch") + else: + # This is the start of an IXFR addition set + self.serial = rdataset[0].serial + self.txn.replace(name, rdataset) + else: + # We saw a non-final SOA for the origin in an AXFR. 
+ raise dns.exception.FormError('unexpected origin SOA ' + 'in AXFR') + continue + if self.expecting_SOA: + # + # We made an IXFR request and are expecting another + # SOA RR, but saw something else, so this must be an + # AXFR response. + # + self.rdtype = dns.rdatatype.AXFR + self.expecting_SOA = False + self.delete_mode = False + self.txn.rollback() + self.txn = self.txn_manager.writer(True) + # + # Note we are falling through into the code below + # so whatever rdataset this was gets written. + # + # Add or remove the data + if self.delete_mode: + self.txn.delete_exact(name, rdataset) + else: + self.txn.add(name, rdataset) + if self.is_udp and not self.done: + # + # This is a UDP IXFR and we didn't get to done, and we didn't + # get the proper "truncated" response + # + raise dns.exception.FormError('unexpected end of UDP IXFR') + return self.done + + # + # Inbounds are context managers. + # + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.txn: + self.txn.rollback() + return False + + +def make_query(txn_manager, serial=0, + use_edns=None, ednsflags=None, payload=None, + request_payload=None, options=None, + keyring=None, keyname=None, + keyalgorithm=dns.tsig.default_algorithm): + """Make an AXFR or IXFR query. + + *txn_manager* is a ``dns.transaction.TransactionManager``, typically a + ``dns.zone.Zone``. + + *serial* is an ``int`` or ``None``. If 0, then IXFR will be + attempted using the most recent serial number from the + *txn_manager*; it is the caller's responsibility to ensure there + are no write transactions active that could invalidate the + retrieved serial. If a serial cannot be determined, AXFR will be + forced. Other integer values are the starting serial to use. + ``None`` forces an AXFR. + + Please see the documentation for :py:func:`dns.message.make_query` and + :py:func:`dns.message.Message.use_tsig` for details on the other parameters + to this function. + + Returns a `(query, serial)` tuple. + """ + (zone_origin, _, origin) = txn_manager.origin_information() + if serial is None: + rdtype = dns.rdatatype.AXFR + elif not isinstance(serial, int): + raise ValueError('serial is not an integer') + elif serial == 0: + with txn_manager.reader() as txn: + rdataset = txn.get(origin, 'SOA') + if rdataset: + serial = rdataset[0].serial + rdtype = dns.rdatatype.IXFR + else: + serial = None + rdtype = dns.rdatatype.AXFR + elif serial > 0 and serial < 4294967296: + rdtype = dns.rdatatype.IXFR + else: + raise ValueError('serial out-of-range') + rdclass = txn_manager.get_class() + q = dns.message.make_query(zone_origin, rdtype, rdclass, + use_edns, False, ednsflags, payload, + request_payload, options) + if serial is not None: + rdata = dns.rdata.from_text(rdclass, 'SOA', f'. . {serial} 0 0 0 0') + rrset = q.find_rrset(q.authority, zone_origin, rdclass, + dns.rdatatype.SOA, create=True) + rrset.add(rdata, 0) + if keyring is not None: + q.use_tsig(keyring, keyname, algorithm=keyalgorithm) + return (q, serial) + +def extract_serial_from_query(query): + """Extract the SOA serial number from query if it is an IXFR and return + it, otherwise return None. + + *query* is a dns.message.QueryMessage that is an IXFR or AXFR request. 
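Putting ``make_query()`` together with a transport, the usual driver is ``dns.query.inbound_xfr()``, which feeds each response message to ``Inbound.process_message()``. A sketch; the server address is a placeholder:

    import dns.query
    import dns.versioned
    import dns.xfr

    zone = dns.versioned.Zone('example.')
    # serial=0 (the default) asks for IXFR when a local serial is known;
    # an empty zone has no SOA yet, so this falls back to AXFR.
    (query, serial) = dns.xfr.make_query(zone)
    dns.query.inbound_xfr('192.0.2.53', zone, query)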
+ + Raises if the query is not an IXFR or AXFR, or if an IXFR doesn't have + an appropriate SOA RRset in the authority section.""" + + question = query.question[0] + if question.rdtype == dns.rdatatype.AXFR: + return None + elif question.rdtype != dns.rdatatype.IXFR: + raise ValueError("query is not an AXFR or IXFR") + soa = query.find_rrset(query.authority, question.name, question.rdclass, + dns.rdatatype.SOA) + return soa[0].serial diff --git a/venv/lib/python3.10/site-packages/dns/zone.py b/venv/lib/python3.10/site-packages/dns/zone.py new file mode 100644 index 0000000..5a64940 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/zone.py @@ -0,0 +1,1225 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Zones.""" + +import contextlib +import hashlib +import io +import os +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.node +import dns.rdataclass +import dns.rdatatype +import dns.rdata +import dns.rdtypes.ANY.SOA +import dns.rdtypes.ANY.ZONEMD +import dns.rrset +import dns.tokenizer +import dns.transaction +import dns.ttl +import dns.grange +import dns.zonefile + + +class BadZone(dns.exception.DNSException): + + """The DNS zone is malformed.""" + + +class NoSOA(BadZone): + + """The DNS zone has no SOA RR at its origin.""" + + +class NoNS(BadZone): + + """The DNS zone has no NS RRset at its origin.""" + + +class UnknownOrigin(BadZone): + + """The DNS zone's origin is unknown.""" + + +class UnsupportedDigestScheme(dns.exception.DNSException): + + """The zone digest's scheme is unsupported.""" + + +class UnsupportedDigestHashAlgorithm(dns.exception.DNSException): + + """The zone digest's origin is unsupported.""" + + +class NoDigest(dns.exception.DNSException): + + """The DNS zone has no ZONEMD RRset at its origin.""" + + +class DigestVerificationFailure(dns.exception.DNSException): + + """The ZONEMD digest failed to verify.""" + + +class DigestScheme(dns.enum.IntEnum): + """ZONEMD Scheme""" + + SIMPLE = 1 + + @classmethod + def _maximum(cls): + return 255 + + +class DigestHashAlgorithm(dns.enum.IntEnum): + """ZONEMD Hash Algorithm""" + + SHA384 = 1 + SHA512 = 2 + + @classmethod + def _maximum(cls): + return 255 + + +_digest_hashers = { + DigestHashAlgorithm.SHA384: hashlib.sha384, + DigestHashAlgorithm.SHA512: hashlib.sha512, +} + + +class Zone(dns.transaction.TransactionManager): + + """A DNS zone. + + A ``Zone`` is a mapping from names to nodes. The zone object may be + treated like a Python dictionary, e.g. ``zone[name]`` will retrieve + the node associated with that name. The *name* may be a + ``dns.name.Name object``, or it may be a string. 
In either case, + if the name is relative it is treated as relative to the origin of + the zone. + """ + + node_factory = dns.node.Node + + __slots__ = ['rdclass', 'origin', 'nodes', 'relativize'] + + def __init__(self, origin, rdclass=dns.rdataclass.IN, relativize=True): + """Initialize a zone object. + + *origin* is the origin of the zone. It may be a ``dns.name.Name``, + a ``str``, or ``None``. If ``None``, then the zone's origin will + be set by the first ``$ORIGIN`` line in a zone file. + + *rdclass*, an ``int``, the zone's rdata class; the default is class IN. + + *relativize*, a ``bool``, determine's whether domain names are + relativized to the zone's origin. The default is ``True``. + """ + + if origin is not None: + if isinstance(origin, str): + origin = dns.name.from_text(origin) + elif not isinstance(origin, dns.name.Name): + raise ValueError("origin parameter must be convertible to a " + "DNS name") + if not origin.is_absolute(): + raise ValueError("origin parameter must be an absolute name") + self.origin = origin + self.rdclass = rdclass + self.nodes = {} + self.relativize = relativize + + def __eq__(self, other): + """Two zones are equal if they have the same origin, class, and + nodes. + + Returns a ``bool``. + """ + + if not isinstance(other, Zone): + return False + if self.rdclass != other.rdclass or \ + self.origin != other.origin or \ + self.nodes != other.nodes: + return False + return True + + def __ne__(self, other): + """Are two zones not equal? + + Returns a ``bool``. + """ + + return not self.__eq__(other) + + def _validate_name(self, name): + if isinstance(name, str): + name = dns.name.from_text(name, None) + elif not isinstance(name, dns.name.Name): + raise KeyError("name parameter must be convertible to a DNS name") + if name.is_absolute(): + if not name.is_subdomain(self.origin): + raise KeyError( + "name parameter must be a subdomain of the zone origin") + if self.relativize: + name = name.relativize(self.origin) + elif not self.relativize: + # We have a relative name in a non-relative zone, so derelativize. + if self.origin is None: + raise KeyError('no zone origin is defined') + name = name.derelativize(self.origin) + return name + + def __getitem__(self, key): + key = self._validate_name(key) + return self.nodes[key] + + def __setitem__(self, key, value): + key = self._validate_name(key) + self.nodes[key] = value + + def __delitem__(self, key): + key = self._validate_name(key) + del self.nodes[key] + + def __iter__(self): + return self.nodes.__iter__() + + def keys(self): + return self.nodes.keys() + + def values(self): + return self.nodes.values() + + def items(self): + return self.nodes.items() + + def get(self, key): + key = self._validate_name(key) + return self.nodes.get(key) + + def __contains__(self, key): + key = self._validate_name(key) + return key in self.nodes + + def find_node(self, name, create=False): + """Find a node in the zone, possibly creating it. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.node.Node``. 
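Both the mapping interface and ``find_node()`` validate names the same way, so relative and absolute forms reach the same node. A sketch over a zone parsed from text (the records are made up):

    import dns.zone

    zone_text = ('@ 3600 IN SOA . . 1 7200 900 86400 300\n'
                 '@ 3600 IN NS a.example.\n'
                 'www 300 IN A 192.0.2.1\n')
    zone = dns.zone.from_text(zone_text, origin='example.')
    assert zone['www'] is zone.find_node('www.example.')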
+ """ + + name = self._validate_name(name) + node = self.nodes.get(name) + if node is None: + if not create: + raise KeyError + node = self.node_factory() + self.nodes[name] = node + return node + + def get_node(self, name, create=False): + """Get a node in the zone, possibly creating it. + + This method is like ``find_node()``, except it returns None instead + of raising an exception if the node does not exist and creation + has not been requested. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.node.Node`` or ``None``. + """ + + try: + node = self.find_node(name, create) + except KeyError: + node = None + return node + + def delete_node(self, name): + """Delete the specified node if it exists. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + It is not an error if the node does not exist. + """ + + name = self._validate_name(name) + if name in self.nodes: + del self.nodes[name] + + def find_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE, + create=False): + """Look for an rdataset with the specified name and type in the zone, + and return an rdataset encapsulating it. + + The rdataset returned is not a copy; changes to it will change + the zone. + + KeyError is raised if the name or type are not found. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, an ``int`` or ``str``, the rdata type desired. + + *covers*, an ``int`` or ``str`` or ``None``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.rdataset.Rdataset``. + """ + + name = self._validate_name(name) + rdtype = dns.rdatatype.RdataType.make(rdtype) + if covers is not None: + covers = dns.rdatatype.RdataType.make(covers) + node = self.find_node(name, create) + return node.find_rdataset(self.rdclass, rdtype, covers, create) + + def get_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE, + create=False): + """Look for an rdataset with the specified name and type in the zone. + + This method is like ``find_rdataset()``, except it returns None instead + of raising an exception if the rdataset does not exist and creation + has not been requested. 
+ + The rdataset returned is not a copy; changes to it will change + the zone. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, an ``int`` or ``str``, the rdata type desired. + + *covers*, an ``int`` or ``str`` or ``None``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.rdataset.Rdataset`` or ``None``. + """ + + try: + rdataset = self.find_rdataset(name, rdtype, covers, create) + except KeyError: + rdataset = None + return rdataset + + def delete_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE): + """Delete the rdataset matching *rdtype* and *covers*, if it + exists at the node specified by *name*. + + It is not an error if the node does not exist, or if there is no + matching rdataset at the node. + + If the node has no rdatasets after the deletion, it will itself + be deleted. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, an ``int`` or ``str``, the rdata type desired. + + *covers*, an ``int`` or ``str`` or ``None``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + """ + + name = self._validate_name(name) + rdtype = dns.rdatatype.RdataType.make(rdtype) + if covers is not None: + covers = dns.rdatatype.RdataType.make(covers) + node = self.get_node(name) + if node is not None: + node.delete_rdataset(self.rdclass, rdtype, covers) + if len(node) == 0: + self.delete_node(name) + + def replace_rdataset(self, name, replacement): + """Replace an rdataset at name. + + It is not an error if there is no rdataset matching I{replacement}. + + Ownership of the *replacement* object is transferred to the zone; + in other words, this method does not store a copy of *replacement* + at the node, it stores *replacement* itself. + + If the node does not exist, it is created. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *replacement*, a ``dns.rdataset.Rdataset``, the replacement rdataset. 
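Continuing the sketch above (same hypothetical zone), the rdataset accessors compose like this; note that ``find_rdataset()`` hands back a live view:

    import dns.rdataset

    rds = zone.find_rdataset('www', 'A')   # not a copy; edits hit the zone
    rds.ttl = 60
    zone.replace_rdataset('www', dns.rdataset.from_text('IN', 'A', 300,
                                                        '192.0.2.2'))
    zone.delete_rdataset('www', 'A')       # node left empty, so it is removed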
+ """ + + if replacement.rdclass != self.rdclass: + raise ValueError('replacement.rdclass != zone.rdclass') + node = self.find_node(name, True) + node.replace_rdataset(replacement) + + def find_rrset(self, name, rdtype, covers=dns.rdatatype.NONE): + """Look for an rdataset with the specified name and type in the zone, + and return an RRset encapsulating it. + + This method is less efficient than the similar + ``find_rdataset()`` because it creates an RRset instead of + returning the matching rdataset. It may be more convenient + for some uses since it returns an object which binds the owner + name to the rdataset. + + This method may not be used to create new nodes or rdatasets; + use ``find_rdataset`` instead. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, an ``int`` or ``str``, the rdata type desired. + + *covers*, an ``int`` or ``str`` or ``None``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.rrset.RRset`` or ``None``. + """ + + name = self._validate_name(name) + rdtype = dns.rdatatype.RdataType.make(rdtype) + if covers is not None: + covers = dns.rdatatype.RdataType.make(covers) + rdataset = self.nodes[name].find_rdataset(self.rdclass, rdtype, covers) + rrset = dns.rrset.RRset(name, self.rdclass, rdtype, covers) + rrset.update(rdataset) + return rrset + + def get_rrset(self, name, rdtype, covers=dns.rdatatype.NONE): + """Look for an rdataset with the specified name and type in the zone, + and return an RRset encapsulating it. + + This method is less efficient than the similar ``get_rdataset()`` + because it creates an RRset instead of returning the matching + rdataset. It may be more convenient for some uses since it + returns an object which binds the owner name to the rdataset. + + This method may not be used to create new nodes or rdatasets; + use ``get_rdataset()`` instead. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, an ``int`` or ``str``, the rdata type desired. + + *covers*, an ``int`` or ``str`` or ``None``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. 
If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.rrset.RRset`` or ``None``. + """ + + try: + rrset = self.find_rrset(name, rdtype, covers) + except KeyError: + rrset = None + return rrset + + def iterate_rdatasets(self, rdtype=dns.rdatatype.ANY, + covers=dns.rdatatype.NONE): + """Return a generator which yields (name, rdataset) tuples for + all rdatasets in the zone which have the specified *rdtype* + and *covers*. If *rdtype* is ``dns.rdatatype.ANY``, the default, + then all rdatasets will be matched. + + *rdtype*, an ``int`` or ``str``, the rdata type desired. + + *covers*, an ``int`` or ``str`` or ``None``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + """ + + rdtype = dns.rdatatype.RdataType.make(rdtype) + if covers is not None: + covers = dns.rdatatype.RdataType.make(covers) + for (name, node) in self.items(): + for rds in node: + if rdtype == dns.rdatatype.ANY or \ + (rds.rdtype == rdtype and rds.covers == covers): + yield (name, rds) + + def iterate_rdatas(self, rdtype=dns.rdatatype.ANY, + covers=dns.rdatatype.NONE): + """Return a generator which yields (name, ttl, rdata) tuples for + all rdatas in the zone which have the specified *rdtype* + and *covers*. If *rdtype* is ``dns.rdatatype.ANY``, the default, + then all rdatas will be matched. + + *rdtype*, an ``int`` or ``str``, the rdata type desired. + + *covers*, an ``int`` or ``str`` or ``None``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + """ + + rdtype = dns.rdatatype.RdataType.make(rdtype) + if covers is not None: + covers = dns.rdatatype.RdataType.make(covers) + for (name, node) in self.items(): + for rds in node: + if rdtype == dns.rdatatype.ANY or \ + (rds.rdtype == rdtype and rds.covers == covers): + for rdata in rds: + yield (name, rds.ttl, rdata) + + def to_file(self, f, sorted=True, relativize=True, nl=None, + want_comments=False, want_origin=False): + """Write a zone to a file. + + *f*, a file or `str`. If *f* is a string, it is treated + as the name of a file to open. + + *sorted*, a ``bool``. If True, the default, then the file + will be written with the names sorted in DNSSEC order from + least to greatest. Otherwise the names will be written in + whatever order they happen to have in the zone's dictionary. + + *relativize*, a ``bool``. If True, the default, then domain + names in the output will be relativized to the zone's origin + if possible. + + *nl*, a ``str`` or None. The end of line string. If not + ``None``, the output will use the platform's native + end-of-line marker (i.e. LF on POSIX, CRLF on Windows). 
+    def to_file(self, f, sorted=True, relativize=True, nl=None,
+                want_comments=False, want_origin=False):
+        """Write a zone to a file.
+
+        *f*, a file or `str`. If *f* is a string, it is treated
+        as the name of a file to open.
+
+        *sorted*, a ``bool``. If True, the default, then the file
+        will be written with the names sorted in DNSSEC order from
+        least to greatest. Otherwise the names will be written in
+        whatever order they happen to have in the zone's dictionary.
+
+        *relativize*, a ``bool``. If True, the default, then domain
+        names in the output will be relativized to the zone's origin
+        if possible.
+
+        *nl*, a ``str`` or ``None``. The end of line string. If
+        ``None``, the output will use the platform's native
+        end-of-line marker (i.e. LF on POSIX, CRLF on Windows).
+
+        *want_comments*, a ``bool``. If ``True``, emit end-of-line comments
+        as part of writing the file. If ``False``, the default, do not
+        emit them.
+
+        *want_origin*, a ``bool``. If ``True``, emit a $ORIGIN line at
+        the start of the file. If ``False``, the default, do not emit
+        one.
+        """
+
+        with contextlib.ExitStack() as stack:
+            if isinstance(f, str):
+                f = stack.enter_context(open(f, 'wb'))
+
+            # f.encoding may be None, or the attribute may not even be
+            # there, so we cannot rely on it.
+            file_enc = getattr(f, 'encoding', None)
+            if file_enc is None:
+                file_enc = 'utf-8'
+
+            if nl is None:
+                # binary mode, '\n' is not enough
+                nl_b = os.linesep.encode(file_enc)
+                nl = '\n'
+            elif isinstance(nl, str):
+                nl_b = nl.encode(file_enc)
+            else:
+                nl_b = nl
+                nl = nl.decode()
+
+            if want_origin:
+                l = '$ORIGIN ' + self.origin.to_text()
+                l_b = l.encode(file_enc)
+                try:
+                    f.write(l_b)
+                    f.write(nl_b)
+                except TypeError:  # textual mode
+                    f.write(l)
+                    f.write(nl)
+
+            if sorted:
+                names = list(self.keys())
+                names.sort()
+            else:
+                names = self.keys()
+            for n in names:
+                l = self[n].to_text(n, origin=self.origin,
+                                    relativize=relativize,
+                                    want_comments=want_comments)
+                l_b = l.encode(file_enc)
+
+                try:
+                    f.write(l_b)
+                    f.write(nl_b)
+                except TypeError:  # textual mode
+                    f.write(l)
+                    f.write(nl)
+
+    def to_text(self, sorted=True, relativize=True, nl=None,
+                want_comments=False, want_origin=False):
+        """Return a zone's text as though it were written to a file.
+
+        *sorted*, a ``bool``. If True, the default, then the file
+        will be written with the names sorted in DNSSEC order from
+        least to greatest. Otherwise the names will be written in
+        whatever order they happen to have in the zone's dictionary.
+
+        *relativize*, a ``bool``. If True, the default, then domain
+        names in the output will be relativized to the zone's origin
+        if possible.
+
+        *nl*, a ``str`` or ``None``. The end of line string. If
+        ``None``, the output will use the platform's native
+        end-of-line marker (i.e. LF on POSIX, CRLF on Windows).
+
+        *want_comments*, a ``bool``. If ``True``, emit end-of-line comments
+        as part of writing the file. If ``False``, the default, do not
+        emit them.
+
+        *want_origin*, a ``bool``. If ``True``, emit a $ORIGIN line at
+        the start of the output. If ``False``, the default, do not emit
+        one.
+
+        Returns a ``str``.
+        """
+        temp_buffer = io.StringIO()
+        self.to_file(temp_buffer, sorted, relativize, nl, want_comments,
+                     want_origin)
+        return_value = temp_buffer.getvalue()
+        temp_buffer.close()
+        return return_value
+
+    def check_origin(self):
+        """Do some simple checking of the zone's origin.
+
+        Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
+
+        Raises ``dns.zone.NoNS`` if there is no NS RRset.
+
+        Raises ``KeyError`` if there is no origin node.
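+
+        (Editorial note: ``from_text``, ``from_file`` and ``from_xfr``
+        call this method automatically when their *check_origin*
+        argument is true.)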
+ """ + if self.relativize: + name = dns.name.empty + else: + name = self.origin + if self.get_rdataset(name, dns.rdatatype.SOA) is None: + raise NoSOA + if self.get_rdataset(name, dns.rdatatype.NS) is None: + raise NoNS + + def _compute_digest(self, hash_algorithm, scheme=DigestScheme.SIMPLE): + hashinfo = _digest_hashers.get(hash_algorithm) + if not hashinfo: + raise UnsupportedDigestHashAlgorithm + if scheme != DigestScheme.SIMPLE: + raise UnsupportedDigestScheme + + if self.relativize: + origin_name = dns.name.empty + else: + origin_name = self.origin + hasher = hashinfo() + for (name, node) in sorted(self.items()): + rrnamebuf = name.to_digestable(self.origin) + for rdataset in sorted(node, + key=lambda rds: (rds.rdtype, rds.covers)): + if name == origin_name and \ + dns.rdatatype.ZONEMD in (rdataset.rdtype, rdataset.covers): + continue + rrfixed = struct.pack('!HHI', rdataset.rdtype, + rdataset.rdclass, rdataset.ttl) + rdatas = [rdata.to_digestable(self.origin) + for rdata in rdataset] + for rdata in sorted(rdatas): + rrlen = struct.pack('!H', len(rdata)) + hasher.update(rrnamebuf + rrfixed + rrlen + rdata) + return hasher.digest() + + def compute_digest(self, hash_algorithm, scheme=DigestScheme.SIMPLE): + if self.relativize: + origin_name = dns.name.empty + else: + origin_name = self.origin + serial = self.get_rdataset(origin_name, dns.rdatatype.SOA)[0].serial + digest = self._compute_digest(hash_algorithm, scheme) + return dns.rdtypes.ANY.ZONEMD.ZONEMD(self.rdclass, + dns.rdatatype.ZONEMD, + serial, scheme, hash_algorithm, + digest) + + def verify_digest(self, zonemd=None): + if zonemd: + digests = [zonemd] + else: + digests = self.get_rdataset(self.origin, dns.rdatatype.ZONEMD) + if digests is None: + raise NoDigest + for digest in digests: + try: + computed = self._compute_digest(digest.hash_algorithm, + digest.scheme) + if computed == digest.digest: + return + except Exception: + pass + raise DigestVerificationFailure + + # TransactionManager methods + + def reader(self): + return Transaction(self, False, + Version(self, 1, self.nodes, self.origin)) + + def writer(self, replacement=False): + txn = Transaction(self, replacement) + txn._setup_version() + return txn + + def origin_information(self): + if self.relativize: + effective = dns.name.empty + else: + effective = self.origin + return (self.origin, self.relativize, effective) + + def get_class(self): + return self.rdclass + + # Transaction methods + + def _end_read(self, txn): + pass + + def _end_write(self, txn): + pass + + def _commit_version(self, _, version, origin): + self.nodes = version.nodes + if self.origin is None: + self.origin = origin + + def _get_next_version_id(self): + # Versions are ephemeral and all have id 1 + return 1 + + +# These classes used to be in dns.versioned, but have moved here so we can use +# the copy-on-write transaction mechanism for both kinds of zones. In a +# regular zone, the version only exists during the transaction, and the nodes +# are regular dns.node.Nodes. + +# A node with a version id. 
+ +class VersionedNode(dns.node.Node): + __slots__ = ['id'] + + def __init__(self): + super().__init__() + # A proper id will get set by the Version + self.id = 0 + + +@dns.immutable.immutable +class ImmutableVersionedNode(VersionedNode): + __slots__ = ['id'] + + def __init__(self, node): + super().__init__() + self.id = node.id + self.rdatasets = tuple( + [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets] + ) + + def find_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE, + create=False): + if create: + raise TypeError("immutable") + return super().find_rdataset(rdclass, rdtype, covers, False) + + def get_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE, + create=False): + if create: + raise TypeError("immutable") + return super().get_rdataset(rdclass, rdtype, covers, False) + + def delete_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE): + raise TypeError("immutable") + + def replace_rdataset(self, replacement): + raise TypeError("immutable") + + def is_immutable(self): + return True + + +class Version: + def __init__(self, zone, id, nodes=None, origin=None): + self.zone = zone + self.id = id + if nodes is not None: + self.nodes = nodes + else: + self.nodes = {} + self.origin = origin + + def _validate_name(self, name): + if name.is_absolute(): + if self.origin is None: + # This should probably never happen as other code (e.g. + # _rr_line) will notice the lack of an origin before us, but + # we check just in case! + raise KeyError('no zone origin is defined') + if not name.is_subdomain(self.origin): + raise KeyError("name is not a subdomain of the zone origin") + if self.zone.relativize: + name = name.relativize(self.origin) + elif not self.zone.relativize: + # We have a relative name in a non-relative zone, so derelativize. + if self.origin is None: + raise KeyError('no zone origin is defined') + name = name.derelativize(self.origin) + return name + + def get_node(self, name): + name = self._validate_name(name) + return self.nodes.get(name) + + def get_rdataset(self, name, rdtype, covers): + node = self.get_node(name) + if node is None: + return None + return node.get_rdataset(self.zone.rdclass, rdtype, covers) + + def items(self): + return self.nodes.items() + + +class WritableVersion(Version): + def __init__(self, zone, replacement=False): + # The zone._versions_lock must be held by our caller in a versioned + # zone. + id = zone._get_next_version_id() + super().__init__(zone, id) + if not replacement: + # We copy the map, because that gives us a simple and thread-safe + # way of doing versions, and we have a garbage collector to help + # us. We only make new node objects if we actually change the + # node. + self.nodes.update(zone.nodes) + # We have to copy the zone origin as it may be None in the first + # version, and we don't want to mutate the zone until we commit. + self.origin = zone.origin + self.changed = set() + + def _maybe_cow(self, name): + name = self._validate_name(name) + node = self.nodes.get(name) + if node is None or name not in self.changed: + new_node = self.zone.node_factory() + if hasattr(new_node, 'id'): + # We keep doing this for backwards compatibility, as earlier + # code used new_node.id != self.id for the "do we need to CoW?" + # test. Now we use the changed set as this works with both + # regular zones and versioned zones. + new_node.id = self.id + if node is not None: + # moo! copy on write! 
+ new_node.rdatasets.extend(node.rdatasets) + self.nodes[name] = new_node + self.changed.add(name) + return new_node + else: + return node + + def delete_node(self, name): + name = self._validate_name(name) + if name in self.nodes: + del self.nodes[name] + self.changed.add(name) + + def put_rdataset(self, name, rdataset): + node = self._maybe_cow(name) + node.replace_rdataset(rdataset) + + def delete_rdataset(self, name, rdtype, covers): + node = self._maybe_cow(name) + node.delete_rdataset(self.zone.rdclass, rdtype, covers) + if len(node) == 0: + del self.nodes[name] + + +@dns.immutable.immutable +class ImmutableVersion(Version): + def __init__(self, version): + # We tell super() that it's a replacement as we don't want it + # to copy the nodes, as we're about to do that with an + # immutable Dict. + super().__init__(version.zone, True) + # set the right id! + self.id = version.id + # keep the origin + self.origin = version.origin + # Make changed nodes immutable + for name in version.changed: + node = version.nodes.get(name) + # it might not exist if we deleted it in the version + if node: + version.nodes[name] = ImmutableVersionedNode(node) + self.nodes = dns.immutable.Dict(version.nodes, True) + + +class Transaction(dns.transaction.Transaction): + + def __init__(self, zone, replacement, version=None, make_immutable=False): + read_only = version is not None + super().__init__(zone, replacement, read_only) + self.version = version + self.make_immutable = make_immutable + + @property + def zone(self): + return self.manager + + def _setup_version(self): + assert self.version is None + self.version = WritableVersion(self.zone, self.replacement) + + def _get_rdataset(self, name, rdtype, covers): + return self.version.get_rdataset(name, rdtype, covers) + + def _put_rdataset(self, name, rdataset): + assert not self.read_only + self.version.put_rdataset(name, rdataset) + + def _delete_name(self, name): + assert not self.read_only + self.version.delete_node(name) + + def _delete_rdataset(self, name, rdtype, covers): + assert not self.read_only + self.version.delete_rdataset(name, rdtype, covers) + + def _name_exists(self, name): + return self.version.get_node(name) is not None + + def _changed(self): + if self.read_only: + return False + else: + return len(self.version.changed) > 0 + + def _end_transaction(self, commit): + if self.read_only: + self.zone._end_read(self) + elif commit and len(self.version.changed) > 0: + if self.make_immutable: + version = ImmutableVersion(self.version) + else: + version = self.version + self.zone._commit_version(self, version, self.version.origin) + else: + # rollback + self.zone._end_write(self) + + def _set_origin(self, origin): + if self.version.origin is None: + self.version.origin = origin + + def _iterate_rdatasets(self): + for (name, node) in self.version.items(): + for rdataset in node: + yield (name, rdataset) + + def _get_node(self, name): + return self.version.get_node(name) + + def _origin_information(self): + (absolute, relativize, effective) = self.manager.origin_information() + if absolute is None and self.version.origin is not None: + # No origin has been committed yet, but we've learned one as part of + # this txn. Use it. 
+            absolute = self.version.origin
+        if relativize:
+            effective = dns.name.empty
+        else:
+            effective = absolute
+        return (absolute, relativize, effective)
+
+
+def from_text(text, origin=None, rdclass=dns.rdataclass.IN,
+              relativize=True, zone_factory=Zone, filename=None,
+              allow_include=False, check_origin=True, idna_codec=None):
+    """Build a zone object from a zone file format string.
+
+    *text*, a ``str``, the zone file format input.
+
+    *origin*, a ``dns.name.Name``, a ``str``, or ``None``. The origin
+    of the zone; if not specified, the first ``$ORIGIN`` statement in the
+    zone file will determine the origin of the zone.
+
+    *rdclass*, an ``int``, the zone's rdata class; the default is class IN.
+
+    *relativize*, a ``bool``, determines whether domain names are
+    relativized to the zone's origin. The default is ``True``.
+
+    *zone_factory*, the zone factory to use or ``None``. If ``None``, then
+    ``dns.zone.Zone`` will be used. The value may be any class or callable
+    that returns a subclass of ``dns.zone.Zone``.
+
+    *filename*, a ``str`` or ``None``, the filename to emit when
+    describing where an error occurred; the default is ``'<string>'``.
+
+    *allow_include*, a ``bool``. If ``True``, then ``$INCLUDE`` directives
+    are permitted. If ``False``, the default, then encountering a
+    ``$INCLUDE`` will raise a ``SyntaxError`` exception.
+
+    *check_origin*, a ``bool``. If ``True``, the default, then sanity
+    checks of the origin node will be made by calling the zone's
+    ``check_origin()`` method.
+
+    *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA
+    encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder
+    is used.
+
+    Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
+
+    Raises ``dns.zone.NoNS`` if there is no NS RRset.
+
+    Raises ``KeyError`` if there is no origin node.
+
+    Returns a subclass of ``dns.zone.Zone``.
+    """
+
+    # 'text' can also be a file, but we don't publish that fact
+    # since it's an implementation detail. The official file
+    # interface is from_file().
+
+    if filename is None:
+        filename = '<string>'
+    zone = zone_factory(origin, rdclass, relativize=relativize)
+    with zone.writer(True) as txn:
+        tok = dns.tokenizer.Tokenizer(text, filename, idna_codec=idna_codec)
+        reader = dns.zonefile.Reader(tok, rdclass, txn,
+                                     allow_include=allow_include)
+        try:
+            reader.read()
+        except dns.zonefile.UnknownOrigin:
+            # for backwards compatibility
+            raise dns.zone.UnknownOrigin
+    # Now that we're done reading, do some basic checking of the zone.
+    if check_origin:
+        zone.check_origin()
+    return zone
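+# Illustrative usage sketch (editorial example, not part of dnspython;
+# the zone data below is hypothetical):
+#
+#     import dns.zone
+#     zone = dns.zone.from_text(
+#         "@ 3600 IN SOA ns1 hostmaster 1 7200 900 1209600 86400\n"
+#         "@ 3600 IN NS ns1\n"
+#         "ns1 3600 IN A 10.0.0.1\n",
+#         origin="example.")
+#     print(zone.get_rrset("ns1", "A"))   # ns1 3600 IN A 10.0.0.1
+#     print(zone.to_text())               # round-trips the zone data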
+def from_file(f, origin=None, rdclass=dns.rdataclass.IN,
+              relativize=True, zone_factory=Zone, filename=None,
+              allow_include=True, check_origin=True):
+    """Read a zone file and build a zone object.
+
+    *f*, a file or ``str``. If *f* is a string, it is treated
+    as the name of a file to open.
+
+    *origin*, a ``dns.name.Name``, a ``str``, or ``None``. The origin
+    of the zone; if not specified, the first ``$ORIGIN`` statement in the
+    zone file will determine the origin of the zone.
+
+    *rdclass*, an ``int``, the zone's rdata class; the default is class IN.
+
+    *relativize*, a ``bool``, determines whether domain names are
+    relativized to the zone's origin. The default is ``True``.
+
+    *zone_factory*, the zone factory to use or ``None``. If ``None``, then
+    ``dns.zone.Zone`` will be used. The value may be any class or callable
+    that returns a subclass of ``dns.zone.Zone``.
+
+    *filename*, a ``str`` or ``None``, the filename to emit when
+    describing where an error occurred; the default is the value of *f*
+    if *f* is a string, otherwise ``'<string>'``.
+
+    *allow_include*, a ``bool``. If ``True``, the default, then ``$INCLUDE``
+    directives are permitted. If ``False``, then encountering a ``$INCLUDE``
+    will raise a ``SyntaxError`` exception.
+
+    *check_origin*, a ``bool``. If ``True``, the default, then sanity
+    checks of the origin node will be made by calling the zone's
+    ``check_origin()`` method.
+
+    Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
+
+    Raises ``dns.zone.NoNS`` if there is no NS RRset.
+
+    Raises ``KeyError`` if there is no origin node.
+
+    Returns a subclass of ``dns.zone.Zone``.
+    """
+
+    with contextlib.ExitStack() as stack:
+        if isinstance(f, str):
+            if filename is None:
+                filename = f
+            f = stack.enter_context(open(f))
+        return from_text(f, origin, rdclass, relativize, zone_factory,
+                         filename, allow_include, check_origin)
+
+
+def from_xfr(xfr, zone_factory=Zone, relativize=True, check_origin=True):
+    """Convert the output of a zone transfer generator into a zone object.
+
+    *xfr*, a generator of ``dns.message.Message`` objects, typically
+    ``dns.query.xfr()``.
+
+    *relativize*, a ``bool``, determines whether domain names are
+    relativized to the zone's origin. The default is ``True``.
+    It is essential that the relativize setting matches the one specified
+    to the generator.
+
+    *check_origin*, a ``bool``. If ``True``, the default, then sanity
+    checks of the origin node will be made by calling the zone's
+    ``check_origin()`` method.
+
+    Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
+
+    Raises ``dns.zone.NoNS`` if there is no NS RRset.
+
+    Raises ``KeyError`` if there is no origin node.
+
+    Returns a subclass of ``dns.zone.Zone``.
+    """
+
+    z = None
+    for r in xfr:
+        if z is None:
+            if relativize:
+                origin = r.origin
+            else:
+                origin = r.answer[0].name
+            rdclass = r.answer[0].rdclass
+            z = zone_factory(origin, rdclass, relativize=relativize)
+        for rrset in r.answer:
+            znode = z.nodes.get(rrset.name)
+            if not znode:
+                znode = z.node_factory()
+                z.nodes[rrset.name] = znode
+            zrds = znode.find_rdataset(rrset.rdclass, rrset.rdtype,
+                                       rrset.covers, True)
+            zrds.update_ttl(rrset.ttl)
+            for rd in rrset:
+                zrds.add(rd)
+    if check_origin:
+        z.check_origin()
+    return z
diff --git a/venv/lib/python3.10/site-packages/dns/zone.pyi b/venv/lib/python3.10/site-packages/dns/zone.pyi
new file mode 100644
index 0000000..272814f
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dns/zone.pyi
@@ -0,0 +1,55 @@
+from typing import Generator, Optional, Union, Tuple, Iterable, Callable, Any, Iterator, TextIO, BinaryIO, Dict
+from . import rdata, zone, rdataclass, name, message, rdatatype, exception, node, rdataset, rrset
+
+class BadZone(exception.DNSException): ...
+class NoSOA(BadZone): ...
+class NoNS(BadZone): ...
+class UnknownOrigin(BadZone): ...
+
+class Zone:
+    def __getitem__(self, key : str) -> node.Node:
+        ...
+    def __init__(self, origin : Union[str,name.Name], rdclass : int = rdataclass.IN, relativize : bool = True) -> None:
+        self.nodes : Dict[str,node.Node]
+        self.origin = origin
+    def values(self):
+        return self.nodes.values()
+    def iterate_rdatas(self, rdtype : Union[int,str] = rdatatype.ANY, covers : Union[int,str] = None) -> Iterable[Tuple[name.Name, int, rdata.Rdata]]:
+        ...
+ def __iter__(self) -> Iterator[str]: + ... + def get_node(self, name : Union[name.Name,str], create=False) -> Optional[node.Node]: + ... + def find_rrset(self, name : Union[str,name.Name], rdtype : Union[int,str], covers=rdatatype.NONE) -> rrset.RRset: + ... + def find_rdataset(self, name : Union[str,name.Name], rdtype : Union[str,int], covers=rdatatype.NONE, + create=False) -> rdataset.Rdataset: + ... + def get_rdataset(self, name : Union[str,name.Name], rdtype : Union[str,int], covers=rdatatype.NONE, create=False) -> Optional[rdataset.Rdataset]: + ... + def get_rrset(self, name : Union[str,name.Name], rdtype : Union[str,int], covers=rdatatype.NONE) -> Optional[rrset.RRset]: + ... + def replace_rdataset(self, name : Union[str,name.Name], replacement : rdataset.Rdataset) -> None: + ... + def delete_rdataset(self, name : Union[str,name.Name], rdtype : Union[str,int], covers=rdatatype.NONE) -> None: + ... + def iterate_rdatasets(self, rdtype : Union[str,int] =rdatatype.ANY, + covers : Union[str,int] =rdatatype.NONE): + ... + def to_file(self, f : Union[TextIO, BinaryIO, str], sorted=True, relativize=True, nl : Optional[bytes] = None): + ... + def to_text(self, sorted=True, relativize=True, nl : Optional[str] = None) -> str: + ... + +def from_xfr(xfr : Generator[Any,Any,message.Message], zone_factory : Callable[..., zone.Zone] = zone.Zone, relativize=True, check_origin=True): + ... + +def from_text(text : str, origin : Optional[Union[str,name.Name]] = None, rdclass : int = rdataclass.IN, + relativize=True, zone_factory : Callable[...,zone.Zone] = zone.Zone, filename : Optional[str] = None, + allow_include=False, check_origin=True) -> zone.Zone: + ... + +def from_file(f, origin : Optional[Union[str,name.Name]] = None, rdclass=rdataclass.IN, + relativize=True, zone_factory : Callable[..., zone.Zone] = Zone, filename : Optional[str] = None, + allow_include=True, check_origin=True) -> zone.Zone: + ... diff --git a/venv/lib/python3.10/site-packages/dns/zonefile.py b/venv/lib/python3.10/site-packages/dns/zonefile.py new file mode 100644 index 0000000..53b4088 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dns/zonefile.py @@ -0,0 +1,624 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
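+# Illustrative usage sketch (editorial example, not part of dnspython):
+# besides the zone Reader, this module exposes ``read_rrsets()`` (defined
+# below) for parsing free-standing records without building a full zone:
+#
+#     import dns.zonefile
+#     rrsets = dns.zonefile.read_rrsets(
+#         "www 300 IN A 10.0.0.2", origin="example.", relativize=False)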
+ +"""DNS Zones.""" + +import re +import sys + +import dns.exception +import dns.name +import dns.node +import dns.rdataclass +import dns.rdatatype +import dns.rdata +import dns.rdtypes.ANY.SOA +import dns.rrset +import dns.tokenizer +import dns.transaction +import dns.ttl +import dns.grange + + +class UnknownOrigin(dns.exception.DNSException): + """Unknown origin""" + + +class CNAMEAndOtherData(dns.exception.DNSException): + """A node has a CNAME and other data""" + + +def _check_cname_and_other_data(txn, name, rdataset): + rdataset_kind = dns.node.NodeKind.classify_rdataset(rdataset) + node = txn.get_node(name) + if node is None: + # empty nodes are neutral. + return + node_kind = node.classify() + if node_kind == dns.node.NodeKind.CNAME and \ + rdataset_kind == dns.node.NodeKind.REGULAR: + raise CNAMEAndOtherData('rdataset type is not compatible with a ' + 'CNAME node') + elif node_kind == dns.node.NodeKind.REGULAR and \ + rdataset_kind == dns.node.NodeKind.CNAME: + raise CNAMEAndOtherData('CNAME rdataset is not compatible with a ' + 'regular data node') + # Otherwise at least one of the node and the rdataset is neutral, so + # adding the rdataset is ok + + +class Reader: + + """Read a DNS zone file into a transaction.""" + + def __init__(self, tok, rdclass, txn, allow_include=False, + allow_directives=True, force_name=None, + force_ttl=None, force_rdclass=None, force_rdtype=None, + default_ttl=None): + self.tok = tok + (self.zone_origin, self.relativize, _) = \ + txn.manager.origin_information() + self.current_origin = self.zone_origin + self.last_ttl = 0 + self.last_ttl_known = False + if force_ttl is not None: + default_ttl = force_ttl + if default_ttl is None: + self.default_ttl = 0 + self.default_ttl_known = False + else: + self.default_ttl = default_ttl + self.default_ttl_known = True + self.last_name = self.current_origin + self.zone_rdclass = rdclass + self.txn = txn + self.saved_state = [] + self.current_file = None + self.allow_include = allow_include + self.allow_directives = allow_directives + self.force_name = force_name + self.force_ttl = force_ttl + self.force_rdclass = force_rdclass + self.force_rdtype = force_rdtype + self.txn.check_put_rdataset(_check_cname_and_other_data) + + def _eat_line(self): + while 1: + token = self.tok.get() + if token.is_eol_or_eof(): + break + + def _get_identifier(self): + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + return token + + def _rr_line(self): + """Process one line from a DNS zone file.""" + token = None + # Name + if self.force_name is not None: + name = self.force_name + else: + if self.current_origin is None: + raise UnknownOrigin + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = self.tok.as_name(token, self.current_origin) + else: + token = self.tok.get() + if token.is_eol_or_eof(): + # treat leading WS followed by EOL/EOF as if they were EOL/EOF. 
+ return + self.tok.unget(token) + name = self.last_name + if not name.is_subdomain(self.zone_origin): + self._eat_line() + return + if self.relativize: + name = name.relativize(self.zone_origin) + + # TTL + if self.force_ttl is not None: + ttl = self.force_ttl + self.last_ttl = ttl + self.last_ttl_known = True + else: + token = self._get_identifier() + ttl = None + try: + ttl = dns.ttl.from_text(token.value) + self.last_ttl = ttl + self.last_ttl_known = True + token = None + except dns.ttl.BadTTL: + if self.default_ttl_known: + ttl = self.default_ttl + elif self.last_ttl_known: + ttl = self.last_ttl + self.tok.unget(token) + + # Class + if self.force_rdclass is not None: + rdclass = self.force_rdclass + else: + token = self._get_identifier() + try: + rdclass = dns.rdataclass.from_text(token.value) + except dns.exception.SyntaxError: + raise + except Exception: + rdclass = self.zone_rdclass + self.tok.unget(token) + if rdclass != self.zone_rdclass: + raise dns.exception.SyntaxError("RR class is not zone's class") + + # Type + if self.force_rdtype is not None: + rdtype = self.force_rdtype + else: + token = self._get_identifier() + try: + rdtype = dns.rdatatype.from_text(token.value) + except Exception: + raise dns.exception.SyntaxError( + "unknown rdatatype '%s'" % token.value) + + try: + rd = dns.rdata.from_text(rdclass, rdtype, self.tok, + self.current_origin, self.relativize, + self.zone_origin) + except dns.exception.SyntaxError: + # Catch and reraise. + raise + except Exception: + # All exceptions that occur in the processing of rdata + # are treated as syntax errors. This is not strictly + # correct, but it is correct almost all of the time. + # We convert them to syntax errors so that we can emit + # helpful filename:line info. + (ty, va) = sys.exc_info()[:2] + raise dns.exception.SyntaxError( + "caught exception {}: {}".format(str(ty), str(va))) + + if not self.default_ttl_known and rdtype == dns.rdatatype.SOA: + # The pre-RFC2308 and pre-BIND9 behavior inherits the zone default + # TTL from the SOA minttl if no $TTL statement is present before the + # SOA is parsed. + self.default_ttl = rd.minimum + self.default_ttl_known = True + if ttl is None: + # if we didn't have a TTL on the SOA, set it! + ttl = rd.minimum + + # TTL check. We had to wait until now to do this as the SOA RR's + # own TTL can be inferred from its minimum. + if ttl is None: + raise dns.exception.SyntaxError("Missing default TTL value") + + self.txn.add(name, ttl, rd) + + def _parse_modify(self, side): + # Here we catch everything in '{' '}' in a group so we can replace it + # with ''. + is_generate1 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+),(.)}).*$") + is_generate2 = re.compile(r"^.*\$({(\+|-?)(\d+)}).*$") + is_generate3 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+)}).*$") + # Sometimes there are modifiers in the hostname. These come after + # the dollar sign. They are in the form: ${offset[,width[,base]]}. 
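+        # Illustrative example (editorial note, hypothetical data): with
+        #   $GENERATE 1-3 host-${0,3,d} A 10.0.0.$
+        # the lhs expands to host-001, host-002, host-003 and the rhs to
+        # 10.0.0.1, 10.0.0.2, 10.0.0.3.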
+ # Make names + g1 = is_generate1.match(side) + if g1: + mod, sign, offset, width, base = g1.groups() + if sign == '': + sign = '+' + g2 = is_generate2.match(side) + if g2: + mod, sign, offset = g2.groups() + if sign == '': + sign = '+' + width = 0 + base = 'd' + g3 = is_generate3.match(side) + if g3: + mod, sign, offset, width = g3.groups() + if sign == '': + sign = '+' + base = 'd' + + if not (g1 or g2 or g3): + mod = '' + sign = '+' + offset = 0 + width = 0 + base = 'd' + + if base != 'd': + raise NotImplementedError() + + return mod, sign, offset, width, base + + def _generate_line(self): + # range lhs [ttl] [class] type rhs [ comment ] + """Process one line containing the GENERATE statement from a DNS + zone file.""" + if self.current_origin is None: + raise UnknownOrigin + + token = self.tok.get() + # Range (required) + try: + start, stop, step = dns.grange.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError + + # lhs (required) + try: + lhs = token.value + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError + + # TTL + try: + ttl = dns.ttl.from_text(token.value) + self.last_ttl = ttl + self.last_ttl_known = True + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.ttl.BadTTL: + if not (self.last_ttl_known or self.default_ttl_known): + raise dns.exception.SyntaxError("Missing default TTL value") + if self.default_ttl_known: + ttl = self.default_ttl + elif self.last_ttl_known: + ttl = self.last_ttl + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = self.zone_rdclass + if rdclass != self.zone_rdclass: + raise dns.exception.SyntaxError("RR class is not zone's class") + # Type + try: + rdtype = dns.rdatatype.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError("unknown rdatatype '%s'" % + token.value) + + # rhs (required) + rhs = token.value + + # The code currently only supports base 'd', so the last value + # in the tuple _parse_modify returns is ignored + lmod, lsign, loffset, lwidth, _ = self._parse_modify(lhs) + rmod, rsign, roffset, rwidth, _ = self._parse_modify(rhs) + for i in range(start, stop + 1, step): + # +1 because bind is inclusive and python is exclusive + + if lsign == '+': + lindex = i + int(loffset) + elif lsign == '-': + lindex = i - int(loffset) + + if rsign == '-': + rindex = i - int(roffset) + elif rsign == '+': + rindex = i + int(roffset) + + lzfindex = str(lindex).zfill(int(lwidth)) + rzfindex = str(rindex).zfill(int(rwidth)) + + name = lhs.replace('$%s' % (lmod), lzfindex) + rdata = rhs.replace('$%s' % (rmod), rzfindex) + + self.last_name = dns.name.from_text(name, self.current_origin, + self.tok.idna_codec) + name = self.last_name + if not name.is_subdomain(self.zone_origin): + self._eat_line() + return + if self.relativize: + name = name.relativize(self.zone_origin) + + try: + rd = dns.rdata.from_text(rdclass, rdtype, rdata, + self.current_origin, self.relativize, + self.zone_origin) + except dns.exception.SyntaxError: + # Catch and reraise. 
+ raise + except Exception: + # All exceptions that occur in the processing of rdata + # are treated as syntax errors. This is not strictly + # correct, but it is correct almost all of the time. + # We convert them to syntax errors so that we can emit + # helpful filename:line info. + (ty, va) = sys.exc_info()[:2] + raise dns.exception.SyntaxError("caught exception %s: %s" % + (str(ty), str(va))) + + self.txn.add(name, ttl, rd) + + def read(self): + """Read a DNS zone file and build a zone object. + + @raises dns.zone.NoSOA: No SOA RR was found at the zone origin + @raises dns.zone.NoNS: No NS RRset was found at the zone origin + """ + + try: + while 1: + token = self.tok.get(True, True) + if token.is_eof(): + if self.current_file is not None: + self.current_file.close() + if len(self.saved_state) > 0: + (self.tok, + self.current_origin, + self.last_name, + self.current_file, + self.last_ttl, + self.last_ttl_known, + self.default_ttl, + self.default_ttl_known) = self.saved_state.pop(-1) + continue + break + elif token.is_eol(): + continue + elif token.is_comment(): + self.tok.get_eol() + continue + elif token.value[0] == '$' and self.allow_directives: + c = token.value.upper() + if c == '$TTL': + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError("bad $TTL") + self.default_ttl = dns.ttl.from_text(token.value) + self.default_ttl_known = True + self.tok.get_eol() + elif c == '$ORIGIN': + self.current_origin = self.tok.get_name() + self.tok.get_eol() + if self.zone_origin is None: + self.zone_origin = self.current_origin + self.txn._set_origin(self.current_origin) + elif c == '$INCLUDE' and self.allow_include: + token = self.tok.get() + filename = token.value + token = self.tok.get() + if token.is_identifier(): + new_origin =\ + dns.name.from_text(token.value, + self.current_origin, + self.tok.idna_codec) + self.tok.get_eol() + elif not token.is_eol_or_eof(): + raise dns.exception.SyntaxError( + "bad origin in $INCLUDE") + else: + new_origin = self.current_origin + self.saved_state.append((self.tok, + self.current_origin, + self.last_name, + self.current_file, + self.last_ttl, + self.last_ttl_known, + self.default_ttl, + self.default_ttl_known)) + self.current_file = open(filename, 'r') + self.tok = dns.tokenizer.Tokenizer(self.current_file, + filename) + self.current_origin = new_origin + elif c == '$GENERATE': + self._generate_line() + else: + raise dns.exception.SyntaxError( + "Unknown zone file directive '" + c + "'") + continue + self.tok.unget(token) + self._rr_line() + except dns.exception.SyntaxError as detail: + (filename, line_number) = self.tok.where() + if detail is None: + detail = "syntax error" + ex = dns.exception.SyntaxError( + "%s:%d: %s" % (filename, line_number, detail)) + tb = sys.exc_info()[2] + raise ex.with_traceback(tb) from None + + +class RRsetsReaderTransaction(dns.transaction.Transaction): + + def __init__(self, manager, replacement, read_only): + assert not read_only + super().__init__(manager, replacement, read_only) + self.rdatasets = {} + + def _get_rdataset(self, name, rdtype, covers): + return self.rdatasets.get((name, rdtype, covers)) + + def _get_node(self, name): + rdatasets = [] + for (rdataset_name, _, _), rdataset in self.rdatasets.items(): + if name == rdataset_name: + rdatasets.append(rdataset) + if len(rdatasets) == 0: + return None + node = dns.node.Node() + node.rdatasets = rdatasets + return node + + def _put_rdataset(self, name, rdataset): + self.rdatasets[(name, rdataset.rdtype, rdataset.covers)] = rdataset 
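+    # Editorial note: rdatasets accumulate keyed by (name, rdtype, covers)
+    # triples, e.g. an A rdataset for www.example. lives under
+    # (www.example., A, NONE); _end_transaction() below converts each
+    # entry into a dns.rrset.RRset.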
+    def _delete_name(self, name):
+        # First remove any changes involving the name
+        remove = []
+        for key in self.rdatasets:
+            if key[0] == name:
+                remove.append(key)
+        if len(remove) > 0:
+            for key in remove:
+                del self.rdatasets[key]
+
+    def _delete_rdataset(self, name, rdtype, covers):
+        try:
+            del self.rdatasets[(name, rdtype, covers)]
+        except KeyError:
+            pass
+
+    def _name_exists(self, name):
+        for (n, _, _) in self.rdatasets:
+            if n == name:
+                return True
+        return False
+
+    def _changed(self):
+        return len(self.rdatasets) > 0
+
+    def _end_transaction(self, commit):
+        if commit and self._changed():
+            rrsets = []
+            for (name, _, _), rdataset in self.rdatasets.items():
+                rrset = dns.rrset.RRset(name, rdataset.rdclass,
+                                        rdataset.rdtype, rdataset.covers)
+                rrset.update(rdataset)
+                rrsets.append(rrset)
+            self.manager.set_rrsets(rrsets)
+
+    def _set_origin(self, origin):
+        pass
+
+
+class RRSetsReaderManager(dns.transaction.TransactionManager):
+    def __init__(self, origin=dns.name.root, relativize=False,
+                 rdclass=dns.rdataclass.IN):
+        self.origin = origin
+        self.relativize = relativize
+        self.rdclass = rdclass
+        self.rrsets = []
+
+    def writer(self, replacement=False):
+        assert replacement is True
+        return RRsetsReaderTransaction(self, True, False)
+
+    def get_class(self):
+        return self.rdclass
+
+    def origin_information(self):
+        if self.relativize:
+            effective = dns.name.empty
+        else:
+            effective = self.origin
+        return (self.origin, self.relativize, effective)
+
+    def set_rrsets(self, rrsets):
+        self.rrsets = rrsets
+
+
+def read_rrsets(text, name=None, ttl=None, rdclass=dns.rdataclass.IN,
+                default_rdclass=dns.rdataclass.IN,
+                rdtype=None, default_ttl=None, idna_codec=None,
+                origin=dns.name.root, relativize=False):
+    """Read one or more rrsets from the specified text, possibly subject
+    to restrictions.
+
+    *text*, a file object or a string, is the input to process.
+
+    *name*, a string, ``dns.name.Name``, or ``None``, is the owner name of
+    the rrset. If not ``None``, then the owner name is "forced", and the
+    input must not specify an owner name. If ``None``, then any owner names
+    are allowed and must be present in the input.
+
+    *ttl*, an ``int``, string, or None. If not ``None``, the TTL is
+    forced to be the specified value and the input must not specify a TTL.
+    If ``None``, then a TTL may be specified in the input. If it is not
+    specified, then the *default_ttl* will be used.
+
+    *rdclass*, a ``dns.rdataclass.RdataClass``, string, or ``None``. If
+    not ``None``, then the class is forced to the specified value, and the
+    input must not specify a class. If ``None``, then the input may specify
+    a class that matches *default_rdclass*. Note that it is not possible to
+    return rrsets with differing classes; specifying ``None`` for the class
+    simply allows the user to optionally type a class as that may be
+    convenient when cutting and pasting.
+
+    *default_rdclass*, a ``dns.rdataclass.RdataClass`` or string. The class
+    of the returned rrsets.
+
+    *rdtype*, a ``dns.rdatatype.RdataType``, string, or ``None``. If not
+    ``None``, then the type is forced to the specified value, and the
+    input must not specify a type. If ``None``, then a type must be present
+    for each RR.
+
+    *default_ttl*, an ``int``, string, or ``None``. If not ``None``, then if
+    the TTL is not forced and is not specified, then this value will be used.
+    If ``None``, then if the TTL is not forced, an error will occur if the
+    TTL is not specified.
+ + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. Note that codecs only apply to the owner name; dnspython does + not do IDNA for names in rdata, as there is no IDNA zonefile format. + + *origin*, a string, ``dns.name.Name``, or ``None``, is the origin for any + relative names in the input, and also the origin to relativize to if + *relativize* is ``True``. + + *relativize*, a bool. If ``True``, names are relativized to the *origin*; + if ``False`` then any relative names in the input are made absolute by + appending the *origin*. + """ + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root, idna_codec) + if isinstance(name, str): + name = dns.name.from_text(name, origin, idna_codec) + if isinstance(ttl, str): + ttl = dns.ttl.from_text(ttl) + if isinstance(default_ttl, str): + default_ttl = dns.ttl.from_text(default_ttl) + if rdclass is not None: + rdclass = dns.rdataclass.RdataClass.make(rdclass) + default_rdclass = dns.rdataclass.RdataClass.make(default_rdclass) + if rdtype is not None: + rdtype = dns.rdatatype.RdataType.make(rdtype) + manager = RRSetsReaderManager(origin, relativize, default_rdclass) + with manager.writer(True) as txn: + tok = dns.tokenizer.Tokenizer(text, '', idna_codec=idna_codec) + reader = Reader(tok, default_rdclass, txn, allow_directives=False, + force_name=name, force_ttl=ttl, force_rdclass=rdclass, + force_rdtype=rdtype, default_ttl=default_ttl) + reader.read() + return manager.rrsets diff --git a/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/LICENSE b/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/LICENSE new file mode 100644 index 0000000..390a726 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/LICENSE @@ -0,0 +1,35 @@ +ISC License + +Copyright (C) Dnspython Contributors + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all +copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR +PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + + +Copyright (C) 2001-2017 Nominum, Inc. +Copyright (C) Google Inc. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose with or without fee is hereby granted, +provided that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/METADATA new file mode 100644 index 0000000..c220906 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/METADATA @@ -0,0 +1,121 @@ +Metadata-Version: 2.1 +Name: dnspython +Version: 2.2.1 +Summary: DNS toolkit +Home-page: https://www.dnspython.org +License: ISC +Author: Bob Halley +Author-email: halley@dnspython.org +Requires-Python: >=3.6,<4.0 +Classifier: License :: OSI Approved +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Provides-Extra: curio +Provides-Extra: dnssec +Provides-Extra: doh +Provides-Extra: idna +Provides-Extra: trio +Provides-Extra: wmi +Requires-Dist: cryptography (>=2.6,<37.0); extra == "dnssec" +Requires-Dist: curio (>=1.2,<2.0); extra == "curio" +Requires-Dist: h2 (>=4.1.0); (python_full_version >= "3.6.2") and (extra == "doh") +Requires-Dist: httpx (>=0.21.1); (python_full_version >= "3.6.2") and (extra == "doh") +Requires-Dist: idna (>=2.1,<4.0); extra == "idna" +Requires-Dist: requests (>=2.23.0,<3.0.0); extra == "doh" +Requires-Dist: requests-toolbelt (>=0.9.1,<0.10.0); extra == "doh" +Requires-Dist: sniffio (>=1.1,<2.0); extra == "curio" +Requires-Dist: trio (>=0.14,<0.20); extra == "trio" +Requires-Dist: wmi (>=1.5.1,<2.0.0); extra == "wmi" +Project-URL: Bug Tracker, https://github.com/rthalley/dnspython/issues +Project-URL: Documentation, https://dnspython.readthedocs.io/en/stable/ +Project-URL: Repository, https://github.com/rthalley/dnspython.git +Description-Content-Type: text/markdown + +# dnspython + +[![Build Status](https://github.com/rthalley/dnspython/actions/workflows/python-package.yml/badge.svg)](https://github.com/rthalley/dnspython/actions/) +[![Documentation Status](https://readthedocs.org/projects/dnspython/badge/?version=latest)](https://dnspython.readthedocs.io/en/latest/?badge=latest) +[![PyPI version](https://badge.fury.io/py/dnspython.svg)](https://badge.fury.io/py/dnspython) +[![License: ISC](https://img.shields.io/badge/License-ISC-brightgreen.svg)](https://opensource.org/licenses/ISC) + +## INTRODUCTION + +dnspython is a DNS toolkit for Python. It supports almost all record types. It +can be used for queries, zone transfers, and dynamic updates. It supports TSIG +authenticated messages and EDNS0. + +dnspython provides both high and low level access to DNS. The high level classes +perform queries for data of a given name, type, and class, and return an answer +set. The low level classes allow direct manipulation of DNS zones, messages, +names, and records. + +To see a few of the ways dnspython can be used, look in the `examples/` +directory. + +dnspython is a utility to work with DNS, `/etc/hosts` is thus not used. For +simple forward DNS lookups, it's better to use `socket.getaddrinfo()` or +`socket.gethostbyname()`. 
+ +dnspython originated at Nominum where it was developed +to facilitate the testing of DNS software. + +## ABOUT THIS RELEASE + +This is dnspython 2.2.1 +Please read +[What's New](https://dnspython.readthedocs.io/en/stable/whatsnew.html) for +information about the changes in this release. + +## INSTALLATION + +* Many distributions have dnspython packaged for you, so you should + check there first. +* If you have pip installed, you can do `pip install dnspython` +* If not just download the source file and unzip it, then run + `sudo python setup.py install` +* To install the latest from the master branch, run `pip install git+https://github.com/rthalley/dnspython.git` + +Dnspython's default installation does not depend on any modules other than +those in the Python standard library. To use some features, additional modules +must be installed. For convenience, pip options are defined for the requirements. + +If you want to use DNS-over-HTTPS, run +`pip install dnspython[doh]`. + +If you want to use DNSSEC functionality, run +`pip install dnspython[dnssec]`. + +If you want to use internationalized domain names (IDNA) +functionality, you must run +`pip install dnspython[idna]` + +If you want to use the Trio asynchronous I/O package, run +`pip install dnspython[trio]`. + +If you want to use the Curio asynchronous I/O package, run +`pip install dnspython[curio]`. + +If you want to use WMI on Windows to determine the active DNS settings +instead of the default registry scanning method, run +`pip install dnspython[wmi]`. + +Note that you can install any combination of the above, e.g.: +`pip install dnspython[doh,dnssec,idna]` + +### Notices + +Python 2.x support ended with the release of 1.16.0. Dnspython 2.0.0 through +2.2.x support Python 3.6 and later. As of dnspython 2.3.0, the minimum +supported Python version will be 3.7. We plan to align future support with the +lifetime of the Python 3 versions. + +Documentation has moved to +[dnspython.readthedocs.io](https://dnspython.readthedocs.io). + +The ChangeLog has been discontinued. Please see the github project page +and git history for detailed change information. 
+ diff --git a/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/RECORD new file mode 100644 index 0000000..0173501 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/RECORD @@ -0,0 +1,282 @@ +dns/__init__.py,sha256=Tnvxutf2E4hCu2RMU7ADmiVnqOikSWxgCcSJ9xuR-v4,1597 +dns/__pycache__/__init__.cpython-310.pyc,, +dns/__pycache__/_asyncbackend.cpython-310.pyc,, +dns/__pycache__/_asyncio_backend.cpython-310.pyc,, +dns/__pycache__/_curio_backend.cpython-310.pyc,, +dns/__pycache__/_immutable_attr.cpython-310.pyc,, +dns/__pycache__/_immutable_ctx.cpython-310.pyc,, +dns/__pycache__/_trio_backend.cpython-310.pyc,, +dns/__pycache__/asyncbackend.cpython-310.pyc,, +dns/__pycache__/asyncquery.cpython-310.pyc,, +dns/__pycache__/asyncresolver.cpython-310.pyc,, +dns/__pycache__/dnssec.cpython-310.pyc,, +dns/__pycache__/e164.cpython-310.pyc,, +dns/__pycache__/edns.cpython-310.pyc,, +dns/__pycache__/entropy.cpython-310.pyc,, +dns/__pycache__/enum.cpython-310.pyc,, +dns/__pycache__/exception.cpython-310.pyc,, +dns/__pycache__/flags.cpython-310.pyc,, +dns/__pycache__/grange.cpython-310.pyc,, +dns/__pycache__/immutable.cpython-310.pyc,, +dns/__pycache__/inet.cpython-310.pyc,, +dns/__pycache__/ipv4.cpython-310.pyc,, +dns/__pycache__/ipv6.cpython-310.pyc,, +dns/__pycache__/message.cpython-310.pyc,, +dns/__pycache__/name.cpython-310.pyc,, +dns/__pycache__/namedict.cpython-310.pyc,, +dns/__pycache__/node.cpython-310.pyc,, +dns/__pycache__/opcode.cpython-310.pyc,, +dns/__pycache__/query.cpython-310.pyc,, +dns/__pycache__/rcode.cpython-310.pyc,, +dns/__pycache__/rdata.cpython-310.pyc,, +dns/__pycache__/rdataclass.cpython-310.pyc,, +dns/__pycache__/rdataset.cpython-310.pyc,, +dns/__pycache__/rdatatype.cpython-310.pyc,, +dns/__pycache__/renderer.cpython-310.pyc,, +dns/__pycache__/resolver.cpython-310.pyc,, +dns/__pycache__/reversename.cpython-310.pyc,, +dns/__pycache__/rrset.cpython-310.pyc,, +dns/__pycache__/serial.cpython-310.pyc,, +dns/__pycache__/set.cpython-310.pyc,, +dns/__pycache__/tokenizer.cpython-310.pyc,, +dns/__pycache__/transaction.cpython-310.pyc,, +dns/__pycache__/tsig.cpython-310.pyc,, +dns/__pycache__/tsigkeyring.cpython-310.pyc,, +dns/__pycache__/ttl.cpython-310.pyc,, +dns/__pycache__/update.cpython-310.pyc,, +dns/__pycache__/version.cpython-310.pyc,, +dns/__pycache__/versioned.cpython-310.pyc,, +dns/__pycache__/win32util.cpython-310.pyc,, +dns/__pycache__/wire.cpython-310.pyc,, +dns/__pycache__/xfr.cpython-310.pyc,, +dns/__pycache__/zone.cpython-310.pyc,, +dns/__pycache__/zonefile.cpython-310.pyc,, +dns/_asyncbackend.py,sha256=BE_E1JcVGFjwiebX7YJqqWY8YvKtFPv3WhC7KAdgh1k,1836 +dns/_asyncio_backend.py,sha256=xEAz-YG65PlKHM9qBm_WclI4uixyUsrp7u7cHSWB4tc,4897 +dns/_curio_backend.py,sha256=qauX-hk-hE7Z27lGmFPP295QC0zkVeN_Wp7zfg3GQLA,3411 +dns/_immutable_attr.py,sha256=aJY_P31E2rNnrin-eghpuKZ3sMcYsWhXG5NVm1mzGsY,2990 +dns/_immutable_ctx.py,sha256=Z8DQkCvYkHiCyDUQTmS1CRolsA3b0P5QfkFm8mm4mGM,2458 +dns/_trio_backend.py,sha256=5ZO_lWpmEiFmXKiNuxsGQomHLBI6jdcpvXtzwoPNjPk,3769 +dns/asyncbackend.py,sha256=NdgU3osBpet_c6xsTdTQjKsVwRHgUDotLDlp9Y4cAGs,3078 +dns/asyncbackend.pyi,sha256=wfRnXld2IZG5m4LgZGf1_7EhiXsdXFRudDVqPdWY9Ps,277 +dns/asyncquery.py,sha256=jRm7hu0_BRRGKOQPdoSjh9ULwSD8yXcS9a-2x1UbqFo,21360 +dns/asyncquery.pyi,sha256=a1TYKd1W3AzysGgDnPeuSp2lVV5X8w9_gHPXryyGOgM,1873 +dns/asyncresolver.py,sha256=Ln7OYoyUGwVmFQn8btsAey6_ow0nvCXEBxr7kr1ai2s,9109 
+dns/asyncresolver.pyi,sha256=D-ZyCDplcFPL_p-nX-PYRiNTuFuuAotNDyJZ4O18KgA,1254 +dns/dnssec.py,sha256=ackfBtuuyUIPQRq79PZ46vhcMvqshPwgdgSJ0MQDbt0,18573 +dns/dnssec.pyi,sha256=7qtK0B06R0nEBFlsH8GXo4xcqzOYQOzPjfKkUawBo64,994 +dns/e164.py,sha256=lVQxMZTruqlv7FKruGqaVjhLidN-N15g_xu8yqPYOW4,3690 +dns/e164.pyi,sha256=C6GFXm4lkgO826nEioD0Ycgaaa1mDNYtKfOwJqyBqPY,370 +dns/edns.py,sha256=g42aB8sOJ8ScXh1WB5ASwobA1W-O6rCOAHsRmXsyK5Y,13120 +dns/entropy.py,sha256=bCCf7rtDG77aBjowjjCZTz4VpSzy7Af5_6kaXXw9mro,4163 +dns/entropy.pyi,sha256=-wdqp48eLqG6b6ZgBURMfSOxCS6AEmrgIk0IZR8_kUI,186 +dns/enum.py,sha256=9r8EiMBvqeXiqFgwfejjwXOVXxYA4sdi_yHNpp8OMi0,2817 +dns/exception.py,sha256=g7-mRZ_pywy9idbcQ3l7y2C05ol2i06UDaH2cTO6GiM,5259 +dns/exception.pyi,sha256=madDNd57ZA7ViKmIUNIEbvvm-iSbjVYdI3fmSYopl9c,326 +dns/flags.py,sha256=KhmIWEhaHsP0m0gm9MaQY0h_L08qGhYjI7_5cYYMw9U,2642 +dns/grange.py,sha256=r9dSdF6w4F2zX7ZWyFHWvvQQmh4DrKmjkmP4ex8yasg,2092 +dns/immutable.py,sha256=LOSKanIDSVc3DNQSspY59lVP-51BmMB8o-xka7t5D6Q,2022 +dns/inet.py,sha256=uqHy22Y7TjVpMK-TUr9yMgFnJ60Z7E71InZZify89_s,4857 +dns/inet.pyi,sha256=eodYxhbMAj7XiY68JIGAB92bsY8pVIB1u9RZSAWs7-A,96 +dns/ipv4.py,sha256=fLVc7CapzxRw-1Tj0j99MP776AgKDgZvdxMqZmy8FnM,1980 +dns/ipv6.py,sha256=AgPfhIm2JPRzW4IlCpkhU__SVaoWsn74mPrSK5HIrd4,6056 +dns/message.py,sha256=ANlF6iRqVyoO94GY9XN3fQjxpmjFdIG4FdIPqImAOXU,56291 +dns/message.pyi,sha256=mTwgJaMMxEKq8G-uzE_sRysmFqGN62YAHpEJWPQmk8U,2076 +dns/name.py,sha256=Le8Xp5-oyLrnABXeo16sA5LZMXso-N2im3Fdc6zz76A,32299 +dns/name.pyi,sha256=0v_vjKIXcpEOnR8aOacrhrYZvmVI0r1Er38CxODRfdg,1640 +dns/namedict.py,sha256=qywaqB1zaI1IKARu3lcDXAroEmU0vK7hvXgUZVLVhGU,3917 +dns/node.py,sha256=8yDPKkAWeWlcdkjNhHORgPBdIyPLL1EbgL19fp-7oxE,11454 +dns/node.pyi,sha256=ULdI6bOsyD-eyGmjLzqOnS-CL8tdCcVRZTEiGxQR960,731 +dns/opcode.py,sha256=Qxwga_fAR2Bc2EdlTSguKKvrBxphVYnAcMkQc2VGSO0,2647 +dns/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +dns/query.py,sha256=8OS9lQffAaybnn0wx3TfTsXL2DI4FjXjvcGiPQWlHcM,43616 +dns/query.pyi,sha256=r1Cm0l0IMlJutZAKR5utj6Hbi9yh_Ebib4oIPtrabFo,2561 +dns/rcode.py,sha256=7g-6cX96AnUSFxQpv6ZzSOnhjBd_ZQwLECNddk_5690,3991 +dns/rdata.py,sha256=vlLZ-0QJmDBIBBr_dswRVdZP93BnNZD5fwSuQa9YkWc,27626 +dns/rdata.pyi,sha256=_dHzexi0bfldpU062ud2kCEY1bsp7ObpFals32MPFrE,856 +dns/rdataclass.py,sha256=V99UXfb2bXERDWEH3v5NK9owly62PlLDkrGmDAOgqZg,2881 +dns/rdataset.py,sha256=4rIY1dxiDxBP8sY6LL6zWJCdcHO6ms3dYoIZGFDPnSk,14884 +dns/rdataset.pyi,sha256=70Gut6vReS7ZH-P7-kGaAqGGnRVJBlT9r33N50ERPAM,2000 +dns/rdatatype.py,sha256=ARI9FUtK2aG1P64IpFcZvaQ8QX7egdmzluY9mosQUyA,6972 +dns/rdtypes/ANY/AFSDB.py,sha256=SUdLEDU_H23BnN_kYsL5hDHn6lAvfkNg3oGXt6FnFVc,1662 +dns/rdtypes/ANY/AMTRELAY.py,sha256=ndfIZ8ffnZC_Q30eWuYvJRdmrw0ZRpvMDPHRo4-E4t4,3439 +dns/rdtypes/ANY/AVC.py,sha256=LHF3DeIbJVQPHsHx1W5_0d2QB5u8wx3CCvbDcTzMAig,1025 +dns/rdtypes/ANY/CAA.py,sha256=noqfT9RZ6yTiP-vvwjjRDQevzWc6B1VcRihxGnJ8mpI,2531 +dns/rdtypes/ANY/CDNSKEY.py,sha256=P0dTZBCeUBw1GvvwQZFOAhJZnATQXeI8-h78kOdl_zQ,1148 +dns/rdtypes/ANY/CDS.py,sha256=yLltG-Tv7BxZZgRTvVD4Cz-LneqRLIGSDaqe80VrEbs,1164 +dns/rdtypes/ANY/CERT.py,sha256=NoZ3dkX7yqJsOPNjC7ysFThjSM372JtKUVvzkA-CyKg,3576 +dns/rdtypes/ANY/CNAME.py,sha256=OvSnWyZ6gomsicHCgxES9G3upRKbGLxLK5Vt2IhUXcc,1207 +dns/rdtypes/ANY/CSYNC.py,sha256=8FXo7LZ8aaKW_KVC27M8Ev-2nAv4LJEVfaTOa3XWq6E,2444 +dns/rdtypes/ANY/DLV.py,sha256=9AQWoYOS9i-GdlsIn6Y-3x9MWKLeDddAyDXZWT8Hsik,987 +dns/rdtypes/ANY/DNAME.py,sha256=Oil8B_mgVQ6_YvxDIVNO3AssA_RtsyeZmZJTUBXD_0g,1151 +dns/rdtypes/ANY/DNSKEY.py,sha256=an8gUCA7Cbm14weUBh7_gP7q237OQ29Cpjjnq9ylPfM,1146 
+dns/rdtypes/ANY/DS.py,sha256=KWhwhK-mKuJvjb4jujnfeiRwHVjznsZ8MfBJ-fmz0WI,996 +dns/rdtypes/ANY/EUI48.py,sha256=BvcEhncVjcRktBxZw0dWx_wYP5JGVBnpAzMypixTW7w,1152 +dns/rdtypes/ANY/EUI64.py,sha256=qfqRmkc-wXmPpN4p559kpH4sc0JG5uBpQjsx0ZNaU30,1162 +dns/rdtypes/ANY/GPOS.py,sha256=L39btXLAXcsO-L1pebF_Q7rS_wwV4OjxHKi2uABrq24,4521 +dns/rdtypes/ANY/HINFO.py,sha256=iGNYKrnopV6nxz_Ium3If9kWxnHXlaUPks8hfObcQj8,2266 +dns/rdtypes/ANY/HIP.py,sha256=oUVNi3dCbUsodR2yU9mUuxeoU5RbR4DPQ0XJ-jdDBbw,3235 +dns/rdtypes/ANY/ISDN.py,sha256=oAS0EzlxLlJS-N5LTuuMDH-WVqo0uPkNZkTDMUEqoFE,2726 +dns/rdtypes/ANY/L32.py,sha256=_ZhdVp8XY1S6Hf6Paz252M5coIUegE8ZIBOt_XLUBGs,1274 +dns/rdtypes/ANY/L64.py,sha256=rOvOPPQFAxwjnvgfHHCqiurFtfuMf06L4McTB3reppI,1653 +dns/rdtypes/ANY/LOC.py,sha256=EIP-zmYZXDGQTmZZiYl_GjALVZkHTf0KnTRXY0lwRzQ,11961 +dns/rdtypes/ANY/LP.py,sha256=3_fmPbgVJx1Elt5qz35CfzOOEeDSUngax1jYEdLIhd4,1326 +dns/rdtypes/ANY/MX.py,sha256=OWfJEANZXF1gKYgXXRAkpz_BYlxxQ8cy0QgKPc7K0bA,996 +dns/rdtypes/ANY/NID.py,sha256=UsSf_a0sr5MLDlWcKGKx6lxeFMX-kknlsCikZhKhTJQ,1549 +dns/rdtypes/ANY/NINFO.py,sha256=Fn7D8tXFbUIBRSEKlsd92T9M_O1xOGqKvsgX6aeRAco,1042 +dns/rdtypes/ANY/NS.py,sha256=OdaHATafwdGDk5AABbW8E3DIUtr5zuItWNRB7AUg-Go,996 +dns/rdtypes/ANY/NSEC.py,sha256=un6wfA5m6oTBCIPp2l4dEzn3iQWN7c4tRUCmi-M_HkU,2480 +dns/rdtypes/ANY/NSEC3.py,sha256=zzOvduy1lW6M3NNAIicBZQxFX0ewQaspzgfRL4jFGJ0,4073 +dns/rdtypes/ANY/NSEC3PARAM.py,sha256=jnJbT83o2lIxPj2lUY6Euid6txdJ-vDuXmuUrLiLylM,2703 +dns/rdtypes/ANY/OPENPGPKEY.py,sha256=ecAhn5sfg4Eub7UsPsA6I82qEJgPg5T-f-VKkUDgrzE,1855 +dns/rdtypes/ANY/OPT.py,sha256=shPDGC4x-Sjy_DHQ9hWaf_7ZBUGcKQYprJE-U29wwrs,2560 +dns/rdtypes/ANY/PTR.py,sha256=GrTYECslYH8wafdIyNhdOqwOMHm-h8cO954DRw3VMNg,998 +dns/rdtypes/ANY/RP.py,sha256=pkJD4LZ2UHDdIp4byPFGg0-JvRcBkuvwY8zu9KFlh4c,2189 +dns/rdtypes/ANY/RRSIG.py,sha256=fpeg_Wc3GtbLxYQtGKLXioQ7A-KVz3JNGq5K0NCk12k,4701 +dns/rdtypes/ANY/RT.py,sha256=9CHkE9dKz2n_RZpuG4zOOWOi4fW3tahxllRXF8043WI,1014 +dns/rdtypes/ANY/SMIMEA.py,sha256=6yjHuVDfIEodBU9wxbCGCDZ5cWYwyY6FCk-aq2VNU0s,222 +dns/rdtypes/ANY/SOA.py,sha256=ptfmGk_2onH8wRfhGqaIcM_M77Cj2bCsEYauDm6YfzQ,3107 +dns/rdtypes/ANY/SPF.py,sha256=wHsKdQWUL0UcohQlbjkNkIfZUADtC-FZyIJNw8NFrIY,1023 +dns/rdtypes/ANY/SSHFP.py,sha256=9XPZLu3W4LB6SD6EoIebCxOR5yXdjE70ecy4xdE8UT4,2657 +dns/rdtypes/ANY/TKEY.py,sha256=g6CYNPjSRFlO7u0uylC-owaF_u-PHqk54RUKqwx2hVg,4834 +dns/rdtypes/ANY/TLSA.py,sha256=EYP7AXBh4zMtBgnfz828qfoLkFLCm5g8vR-6oX_DTbE,219 +dns/rdtypes/ANY/TSIG.py,sha256=d1lRKt7nz9Cn-mpF5LW78p7M4ANtKL5eAq2BSwLd880,4463 +dns/rdtypes/ANY/TXT.py,sha256=7IAIjgZ0hX_MIh_b0ApzKvxLXHTS0rteR-KXt4HLaV0,1001 +dns/rdtypes/ANY/URI.py,sha256=0uR6_JoOG13euDQBjb11QtI84jSGOPiMrYOmkmVYDZ4,2957 +dns/rdtypes/ANY/X25.py,sha256=sd0jJcuowZYP7P1aABLHwNffp9Ls5Ru0hrCK4e9EkIQ,1949 +dns/rdtypes/ANY/ZONEMD.py,sha256=GqQh1R4OvGYY-HHpN6aB-XyZmumLmXzTXkw1Wsbduew,2480 +dns/rdtypes/ANY/__init__.py,sha256=yQK08BH-28pIzIorr82nTEWkC_7IuKzv_WmU6VkW2yY,1497 +dns/rdtypes/ANY/__pycache__/AFSDB.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/AVC.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/CAA.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/CDS.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/CERT.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/CNAME.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/CSYNC.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/DLV.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/DNAME.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-310.pyc,, 
+dns/rdtypes/ANY/__pycache__/DS.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/EUI48.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/EUI64.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/GPOS.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/HINFO.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/HIP.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/ISDN.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/L32.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/L64.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/LOC.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/LP.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/MX.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/NID.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/NINFO.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/NS.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC3.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/OPT.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/PTR.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/RP.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/RRSIG.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/RT.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/SOA.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/SPF.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/SSHFP.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/TKEY.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/TLSA.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/TSIG.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/TXT.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/URI.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/X25.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-310.pyc,, +dns/rdtypes/ANY/__pycache__/__init__.cpython-310.pyc,, +dns/rdtypes/CH/A.py,sha256=00CyWdEU67RT--0dXVrz-uBi_bR-hxP6QxSqB6rdv9A,2220 +dns/rdtypes/CH/__init__.py,sha256=Ef8Z58kdxbLpTqtgP_u3MfVQoqwb3If2T68YEwIMosw,923 +dns/rdtypes/CH/__pycache__/A.cpython-310.pyc,, +dns/rdtypes/CH/__pycache__/__init__.cpython-310.pyc,, +dns/rdtypes/IN/A.py,sha256=jlL0tINvsyzigE0IsHrrhscuygv0Vwv8_r44q4eNkXw,1819 +dns/rdtypes/IN/AAAA.py,sha256=jVoTTLgLqQF2qjkXvanm4tdC5bE_BPG27qeJTXwrdYQ,1825 +dns/rdtypes/IN/APL.py,sha256=0R-SfJjxzdnLiW8CNBh4ZzICliI3jIJ06s344YcdY64,5104 +dns/rdtypes/IN/DHCID.py,sha256=3OsqQ8rJ44QBNR6IBI_DHc0Rv78rEUP-sWS7P-IB_NA,1844 +dns/rdtypes/IN/HTTPS.py,sha256=wzVAayp-i1AACNh4v6RY1ZKSbIq61-jDc9Fmeoat6Sc,219 +dns/rdtypes/IN/IPSECKEY.py,sha256=JFwSGGE3GeawuOJ2siVWcaPTL_c7cyoiQ9ik8-rKY7A,3435 +dns/rdtypes/IN/KX.py,sha256=govh0YTer2-mpfhxUwc-fQb1WnqMkEkq5jIRo20nh1E,1014 +dns/rdtypes/IN/NAPTR.py,sha256=CjXtq006fjZkyBBfc6IPzYS-IPwwp0qz7zlR1g4UjXQ,3682 +dns/rdtypes/IN/NSAP.py,sha256=_7Qf9fQK_1JRHrTBIyac_04E6gpz2iznmU3unPxLgrw,2170 +dns/rdtypes/IN/NSAP_PTR.py,sha256=hTnieARrAxO-yuFeMppj4wlRX6gv6RC089hLaurs-UQ,1016 +dns/rdtypes/IN/PX.py,sha256=C238E8bGbR3KcyQw0QIhUCNdkzCQ4bmEewrlm5QU1S8,2761 +dns/rdtypes/IN/SRV.py,sha256=-2tjLp2DJl1QvsGcdy3DE4oZhyvqqvZ8GL7zEr3ySG8,2806 +dns/rdtypes/IN/SVCB.py,sha256=SrGsGjBqrnskBmxBOZQej_uyPqtBnZmki0jlPoSTud0,217 +dns/rdtypes/IN/WKS.py,sha256=RdapAXarcDER6790yZg2JB9ufSoPFgIF2lRhOcAnGm8,3656 +dns/rdtypes/IN/__init__.py,sha256=UxnHrNHZXOuwEUNeBa4HcalgxUZhyuZ18TzhBBZnTv0,1083 +dns/rdtypes/IN/__pycache__/A.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/AAAA.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/APL.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/DHCID.cpython-310.pyc,, 
+dns/rdtypes/IN/__pycache__/HTTPS.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/KX.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/NAPTR.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/NSAP.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/PX.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/SRV.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/SVCB.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/WKS.cpython-310.pyc,, +dns/rdtypes/IN/__pycache__/__init__.cpython-310.pyc,, +dns/rdtypes/__init__.py,sha256=aQ6_FR44VZIeSILzd8xM5qTnWGg9oQ4im2yS__zK2Rw,1072 +dns/rdtypes/__pycache__/__init__.cpython-310.pyc,, +dns/rdtypes/__pycache__/dnskeybase.cpython-310.pyc,, +dns/rdtypes/__pycache__/dsbase.cpython-310.pyc,, +dns/rdtypes/__pycache__/euibase.cpython-310.pyc,, +dns/rdtypes/__pycache__/mxbase.cpython-310.pyc,, +dns/rdtypes/__pycache__/nsbase.cpython-310.pyc,, +dns/rdtypes/__pycache__/svcbbase.cpython-310.pyc,, +dns/rdtypes/__pycache__/tlsabase.cpython-310.pyc,, +dns/rdtypes/__pycache__/txtbase.cpython-310.pyc,, +dns/rdtypes/__pycache__/util.cpython-310.pyc,, +dns/rdtypes/dnskeybase.py,sha256=b35nr-NbOV9TbQbRBeG_Yi1lYQIZo4_oFKCOZHThdkE,2836 +dns/rdtypes/dnskeybase.pyi,sha256=KWTKjvLuMc6g-z_4UoGyRxKEyxMwzFYY3Gu9Nlm4Fcg,878 +dns/rdtypes/dsbase.py,sha256=3o-lIKihV6iNQw4d0eVKgB98KG5OPjMuEBugO51zlJY,3555 +dns/rdtypes/euibase.py,sha256=oluvkd1ARs953vtPJODrw2Rfdm2rN58lkliOOIphW_M,2682 +dns/rdtypes/mxbase.py,sha256=N5i0iHl6Xoe0V8iSrT8C4Nprc2iyV9qut90bRJvQST4,3203 +dns/rdtypes/nsbase.py,sha256=gofwZ0GN3BCTLTT56JiXxr2IfuwsNZz13H5y_IjWl9g,2329 +dns/rdtypes/svcbbase.py,sha256=jw97ogo7vsKaFFCLmNnLcAEWMNnnbqtjRFN4LKMPdks,16945 +dns/rdtypes/tlsabase.py,sha256=JPH5F8sFhub3nkpbWpVTT7UZjKi117gI8BXjeSbr2fo,2755 +dns/rdtypes/txtbase.py,sha256=GQKGQfdrfLKMVCkNXrxEbZbH1rPFBQEWl4uveDLIxrE,3140 +dns/rdtypes/txtbase.pyi,sha256=Jbvuq14fNJ7xB9Mx6QZiggaTJmNjoVX9t_WqKbVI7qE,333 +dns/rdtypes/util.py,sha256=0nwDeIV51sovYbarpj_ZdedC18ixUNjC5onn6ttpRCc,8751 +dns/renderer.py,sha256=Q8iZh5XrPKKIhhyQMASj56tkN2ZAGBeUH0McW0QHSC0,8801 +dns/resolver.py,sha256=zyW1hc0AT9YaLw_2qHEGfvvYlx8ayjnvyHDa_ZNVA2Y,58168 +dns/resolver.pyi,sha256=uFi1PNo2tLjtad_WQeH3sVLc83R1sd5q2GL5J6URDFw,2537 +dns/reversename.py,sha256=KXXAMSs9TWZL10lzAlygJzT_Ye_p7rPAxMgR1cutKME,3733 +dns/reversename.pyi,sha256=VYOBIygYXW9DlDQDHBoiG9Vc-kOUYnbw2h9iJDvLlz8,120 +dns/rrset.py,sha256=Xk2qJanJFq4ZUsCxlQR1aPbIC5MkMrx_RmJWjYoBStk,7622 +dns/rrset.pyi,sha256=G9Kh4fpCfwf9lVUXOcrdBCFLjZkWOfyEn66lQb1U3D0,394 +dns/serial.py,sha256=QdTGtDYck-htn0X0Ix6fKVF2LtGhOh-ZnBaTIUQr3wI,3615 +dns/set.py,sha256=IbEuUBTaiPCm51JUJiLOb6rarfZ8ldPfmzMK1NkkyUw,7823 +dns/tokenizer.py,sha256=d9X0ELBB8WXwJzb7qGvv_f4c4Ln00D6vK5nTL6SIpo8,22634 +dns/transaction.py,sha256=MauNYi3ag-K6tpJzvPdAerfqzXuqyAzhc6QA9QUTOVo,20387 +dns/tsig.py,sha256=0-Y8KKAHV2UVgEUDaycIN89Miqtas95ydtpF4XNoB6g,11254 +dns/tsigkeyring.py,sha256=uGsU4jYQ-C2i8U0dGMFNF_378zM-CPuJvaqfAbJvO9k,2501 +dns/tsigkeyring.pyi,sha256=J7VraMYWqtZ2Ud_9SXH8TISlO9PRplBaJaDwOlaOl4s,190 +dns/ttl.py,sha256=O8Ne9WY70O0gnzRj_WlRTYM6c3uLqr8cX_Z53Istjh4,2917 +dns/update.py,sha256=hOhO4I0hlFgb-DmcidAE4nNsJn3D6OWphgfQEIDAm5s,10807 +dns/update.pyi,sha256=1HnX-EvnGiH5-XMCGn01bQSbP2MMkPQidMxsofR4UTo,1006 +dns/version.py,sha256=3AmnNp7qmXIgvnR5wJ4QYk9RKW4FbY6DqZ6R8gvJOh0,1591 +dns/versioned.py,sha256=C_pveMZvnL6uDba-x_cK6NjPZdHEVtk03U4ADvciuow,10497 +dns/win32util.py,sha256=Patl8zDni623uptIdIy1ktjHMNPE5wGXQ20lGREDBHM,8768 
+dns/wire.py,sha256=ywbS4oTzkf0zz_CZK5ne5yZjZafGmfob1kXapI_PeXA,2540 +dns/xfr.py,sha256=-hzgUZ5Up6zeRRgy7fo2-VpmETKMM0-8KfDGLEPHVqE,12576 +dns/zone.py,sha256=C4kfkp6k8b84V4Hp-sREpb9f2rqkEutFApyEzeXnmTA,44839 +dns/zone.pyi,sha256=wNyCxNPwT0EFRh6UJqUHCYIxGffdvnD6uHOk987Ufvc,2958 +dns/zonefile.py,sha256=lyMUkpkCiSaDMvHFuP5T7q3v1fDDMWpI4RofF9jcvCU,24347 +dnspython-2.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +dnspython-2.2.1.dist-info/LICENSE,sha256=w-o_9WVLMpwZ07xfdIGvYjw93tSmFFWFSZ-EOtPXQc0,1526 +dnspython-2.2.1.dist-info/METADATA,sha256=qHya2ca45xgYRTOQQOlURSCrgORjMrfVDsKukFAdWKs,4883 +dnspython-2.2.1.dist-info/RECORD,, +dnspython-2.2.1.dist-info/WHEEL,sha256=DA86_h4QwwzGeRoz62o1svYt5kGEXpoUTuTtwzoTb30,83 diff --git a/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/WHEEL new file mode 100644 index 0000000..4d81844 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dnspython-2.2.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry 1.0.8 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/LICENSE b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/LICENSE new file mode 100644 index 0000000..474479a --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/LICENSE @@ -0,0 +1,24 @@ +"python-ecdsa" Copyright (c) 2010 Brian Warner + +Portions written in 2005 by Peter Pearson and placed in the public domain. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/METADATA new file mode 100644 index 0000000..7b52cd7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/METADATA @@ -0,0 +1,658 @@ +Metadata-Version: 2.1 +Name: ecdsa +Version: 0.17.0 +Summary: ECDSA cryptographic signature library (pure python) +Home-page: http://github.com/tlsfuzzer/python-ecdsa +Author: Brian Warner +Author-email: warner@lothar.com +License: MIT +Platform: UNKNOWN +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.* +Description-Content-Type: text/markdown +Requires-Dist: six (>=1.9.0) +Provides-Extra: gmpy +Requires-Dist: gmpy ; extra == 'gmpy' +Provides-Extra: gmpy2 +Requires-Dist: gmpy2 ; extra == 'gmpy2' + +# Pure-Python ECDSA and ECDH + +[![Build Status](https://github.com/tlsfuzzer/python-ecdsa/workflows/GitHub%20CI/badge.svg?branch=master)](https://github.com/tlsfuzzer/python-ecdsa/actions?query=workflow%3A%22GitHub+CI%22+branch%3Amaster) +[![Coverage Status](https://coveralls.io/repos/github/tlsfuzzer/python-ecdsa/badge.svg?branch=master)](https://coveralls.io/github/tlsfuzzer/python-ecdsa?branch=master) +[![condition coverage](https://img.shields.io/badge/condition%20coverage-87%25-yellow)](https://travis-ci.com/github/tlsfuzzer/python-ecdsa/jobs/458951056#L544) +[![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/tlsfuzzer/python-ecdsa.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/tlsfuzzer/python-ecdsa/context:python) +[![Total alerts](https://img.shields.io/lgtm/alerts/g/tlsfuzzer/python-ecdsa.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/tlsfuzzer/python-ecdsa/alerts/) +[![Latest Version](https://img.shields.io/pypi/v/ecdsa.svg?style=flat)](https://pypi.python.org/pypi/ecdsa/) +![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat) + + +This is an easy-to-use implementation of ECC (Elliptic Curve Cryptography) +with support for ECDSA (Elliptic Curve Digital Signature Algorithm) and ECDH +(Elliptic Curve Diffie-Hellman), implemented purely in Python, released under +the MIT license. With this library, you can quickly create keypairs (signing +key and verifying key), sign messages, and verify the signatures. You can +also agree on a shared secret key based on exchanged public keys. +The keys and signatures are very short, making them easy to handle and +incorporate into other protocols. + +## Features + +This library provides key generation, signing, verifying, and shared secret +derivation for five +popular NIST "Suite B" GF(p) (_prime field_) curves, with key lengths of 192, +224, 256, 384, and 521 bits. The "short names" for these curves, as known by +the OpenSSL tool (`openssl ecparam -list_curves`), are: `prime192v1`, +`secp224r1`, `prime256v1`, `secp384r1`, and `secp521r1`. 
It includes the
+256-bit curve `secp256k1` used by Bitcoin. There is also support for the
+regular (non-twisted) variants of Brainpool curves from 160 to 512 bits. The
+"short names" of those curves are: `brainpoolP160r1`, `brainpoolP192r1`,
+`brainpoolP224r1`, `brainpoolP256r1`, `brainpoolP320r1`, `brainpoolP384r1`,
+`brainpoolP512r1`. A few of the small curves from the SEC standard are also
+included (mainly to speed up testing of the library); those are:
+`secp112r1`, `secp112r2`, `secp128r1`, and `secp160r1`.
+No other curves are included, but it is not too hard to add support for more
+curves over prime fields.
+
+## Dependencies
+
+This library uses only Python and the 'six' package. It is compatible with
+Python 2.6, 2.7 and 3.3+. It also supports execution on alternative
+implementations such as pypy and pypy3.
+
+If `gmpy2` or `gmpy` is installed, it will be used for faster arithmetic.
+Either of them can be installed after this library is installed;
+`python-ecdsa` will detect its presence on start-up and use it
+automatically.
+You should prefer `gmpy2` on Python 3 for optimal performance.
+
+To run the OpenSSL compatibility tests, the 'openssl' tool must be in your
+`PATH`. This release has been tested successfully against OpenSSL 0.9.8o,
+1.0.0a, 1.0.2f and 1.1.1d (among others).
+
+
+## Installation
+
+This library is available on PyPI; it's recommended to install it using `pip`:
+
+```
+pip install ecdsa
+```
+
+If higher performance is wanted and using native code is not a problem,
+it's possible to specify installation together with `gmpy2`:
+
+```
+pip install ecdsa[gmpy2]
+```
+
+or (slower, legacy option):
+```
+pip install ecdsa[gmpy]
+```
+
+## Speed
+
+The following table shows how long this library takes to generate keypairs
+(`keygen`), to sign data (`sign`), to verify those signatures (`verify`),
+to derive a shared secret (`ecdh`), and
+to verify the signatures with no key-specific precomputation (`no PC verify`).
+All those values are in seconds.
+For convenience, the inverses of those values are also provided:
+how many keys per second can be generated (`keygen/s`), how many signatures
+can be made per second (`sign/s`), how many signatures can be verified
+per second (`verify/s`), how many shared secrets can be derived per second
+(`ecdh/s`), and how many signatures with no key-specific
+precomputation can be verified per second (`no PC verify/s`). The size of the
+raw signature (generally the smallest
+way a signature can be encoded) is also provided in the `siglen` column.
+Use `tox -e speed` to generate this table on your own computer.
+On an Intel Core i7 4790K @ 4.0GHz I'm getting the following performance: + +``` + siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s + NIST192p: 48 0.00032s 3134.06 0.00033s 2985.53 0.00063s 1598.36 0.00129s 774.43 + NIST224p: 56 0.00040s 2469.24 0.00042s 2367.88 0.00081s 1233.41 0.00170s 586.66 + NIST256p: 64 0.00051s 1952.73 0.00054s 1867.80 0.00098s 1021.86 0.00212s 471.27 + NIST384p: 96 0.00107s 935.92 0.00111s 904.23 0.00203s 491.77 0.00446s 224.00 + NIST521p: 132 0.00210s 475.52 0.00215s 464.16 0.00398s 251.28 0.00874s 114.39 + SECP256k1: 64 0.00052s 1921.54 0.00054s 1847.49 0.00105s 948.68 0.00210s 477.01 + BRAINPOOLP160r1: 40 0.00025s 4003.88 0.00026s 3845.12 0.00053s 1893.93 0.00105s 949.92 + BRAINPOOLP192r1: 48 0.00033s 3043.97 0.00034s 2975.98 0.00063s 1581.50 0.00135s 742.29 + BRAINPOOLP224r1: 56 0.00041s 2436.44 0.00043s 2315.51 0.00078s 1278.49 0.00180s 556.16 + BRAINPOOLP256r1: 64 0.00053s 1892.49 0.00054s 1846.24 0.00114s 875.64 0.00229s 437.25 + BRAINPOOLP320r1: 80 0.00073s 1361.26 0.00076s 1309.25 0.00143s 699.29 0.00322s 310.49 + BRAINPOOLP384r1: 96 0.00107s 931.29 0.00111s 901.80 0.00230s 434.19 0.00476s 210.20 + BRAINPOOLP512r1: 128 0.00207s 483.41 0.00212s 471.42 0.00425s 235.43 0.00912s 109.61 + SECP112r1: 28 0.00015s 6672.53 0.00016s 6440.34 0.00031s 3265.41 0.00056s 1774.20 + SECP112r2: 28 0.00015s 6697.11 0.00015s 6479.98 0.00028s 3524.72 0.00058s 1716.16 + SECP128r1: 32 0.00018s 5497.65 0.00019s 5272.89 0.00036s 2747.39 0.00072s 1396.16 + SECP160r1: 42 0.00025s 3949.32 0.00026s 3894.45 0.00046s 2153.85 0.00102s 985.07 + + ecdh ecdh/s + NIST192p: 0.00104s 964.89 + NIST224p: 0.00134s 748.63 + NIST256p: 0.00170s 587.08 + NIST384p: 0.00352s 283.90 + NIST521p: 0.00717s 139.51 + SECP256k1: 0.00154s 648.40 + BRAINPOOLP160r1: 0.00082s 1220.70 + BRAINPOOLP192r1: 0.00105s 956.75 + BRAINPOOLP224r1: 0.00136s 734.52 + BRAINPOOLP256r1: 0.00178s 563.32 + BRAINPOOLP320r1: 0.00252s 397.23 + BRAINPOOLP384r1: 0.00376s 266.27 + BRAINPOOLP512r1: 0.00733s 136.35 + SECP112r1: 0.00046s 2180.40 + SECP112r2: 0.00045s 2229.14 + SECP128r1: 0.00054s 1868.15 + SECP160r1: 0.00080s 1243.98 +``` + +To test performance with `gmpy2` loaded, use `tox -e speedgmpy2`. 
+On the same machine I'm getting the following performance with `gmpy2`: +``` + siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s + NIST192p: 48 0.00017s 5933.40 0.00017s 5751.70 0.00032s 3125.28 0.00067s 1502.41 + NIST224p: 56 0.00021s 4782.87 0.00022s 4610.05 0.00040s 2487.04 0.00089s 1126.90 + NIST256p: 64 0.00023s 4263.98 0.00024s 4125.16 0.00045s 2200.88 0.00098s 1016.82 + NIST384p: 96 0.00041s 2449.54 0.00042s 2399.96 0.00083s 1210.57 0.00172s 581.43 + NIST521p: 132 0.00071s 1416.07 0.00072s 1389.81 0.00144s 692.93 0.00312s 320.40 + SECP256k1: 64 0.00024s 4245.05 0.00024s 4122.09 0.00045s 2206.40 0.00094s 1068.32 + BRAINPOOLP160r1: 40 0.00014s 6939.17 0.00015s 6681.55 0.00029s 3452.43 0.00057s 1769.81 + BRAINPOOLP192r1: 48 0.00017s 5920.05 0.00017s 5774.36 0.00034s 2979.00 0.00069s 1453.19 + BRAINPOOLP224r1: 56 0.00021s 4732.12 0.00022s 4622.65 0.00041s 2422.47 0.00087s 1149.87 + BRAINPOOLP256r1: 64 0.00024s 4233.02 0.00024s 4115.20 0.00047s 2143.27 0.00098s 1015.60 + BRAINPOOLP320r1: 80 0.00032s 3162.38 0.00032s 3077.62 0.00063s 1598.83 0.00136s 737.34 + BRAINPOOLP384r1: 96 0.00041s 2436.88 0.00042s 2395.62 0.00083s 1202.68 0.00178s 562.85 + BRAINPOOLP512r1: 128 0.00063s 1587.60 0.00064s 1558.83 0.00125s 799.96 0.00281s 355.83 + SECP112r1: 28 0.00009s 11118.66 0.00009s 10775.48 0.00018s 5456.00 0.00033s 3020.83 + SECP112r2: 28 0.00009s 11322.97 0.00009s 10857.71 0.00017s 5748.77 0.00032s 3094.28 + SECP128r1: 32 0.00010s 10078.39 0.00010s 9665.27 0.00019s 5200.58 0.00036s 2760.88 + SECP160r1: 42 0.00015s 6875.51 0.00015s 6647.35 0.00029s 3422.41 0.00057s 1768.35 + + ecdh ecdh/s + NIST192p: 0.00050s 1985.70 + NIST224p: 0.00066s 1524.16 + NIST256p: 0.00071s 1413.07 + NIST384p: 0.00127s 788.89 + NIST521p: 0.00230s 434.85 + SECP256k1: 0.00071s 1409.95 + BRAINPOOLP160r1: 0.00042s 2374.65 + BRAINPOOLP192r1: 0.00051s 1960.01 + BRAINPOOLP224r1: 0.00066s 1518.37 + BRAINPOOLP256r1: 0.00071s 1399.90 + BRAINPOOLP320r1: 0.00100s 997.21 + BRAINPOOLP384r1: 0.00129s 777.51 + BRAINPOOLP512r1: 0.00210s 475.99 + SECP112r1: 0.00022s 4457.70 + SECP112r2: 0.00024s 4252.33 + SECP128r1: 0.00028s 3589.31 + SECP160r1: 0.00043s 2305.02 +``` + +(there's also `gmpy` version, execute it using `tox -e speedgmpy`) + +For comparison, a highly optimised implementation (including curve-specific +assembly for some curves), like the one in OpenSSL 1.1.1d, provides following +performance numbers on the same machine. +Run `openssl speed ecdsa` and `openssl speed ecdh` to reproduce it: +``` + sign verify sign/s verify/s + 192 bits ecdsa (nistp192) 0.0002s 0.0002s 4785.6 5380.7 + 224 bits ecdsa (nistp224) 0.0000s 0.0001s 22475.6 9822.0 + 256 bits ecdsa (nistp256) 0.0000s 0.0001s 45069.6 14166.6 + 384 bits ecdsa (nistp384) 0.0008s 0.0006s 1265.6 1648.1 + 521 bits ecdsa (nistp521) 0.0003s 0.0005s 3753.1 1819.5 + 256 bits ecdsa (brainpoolP256r1) 0.0003s 0.0003s 2983.5 3333.2 + 384 bits ecdsa (brainpoolP384r1) 0.0008s 0.0007s 1258.8 1528.1 + 512 bits ecdsa (brainpoolP512r1) 0.0015s 0.0012s 675.1 860.1 + + op op/s + 192 bits ecdh (nistp192) 0.0002s 4853.4 + 224 bits ecdh (nistp224) 0.0001s 15252.1 + 256 bits ecdh (nistp256) 0.0001s 18436.3 + 384 bits ecdh (nistp384) 0.0008s 1292.7 + 521 bits ecdh (nistp521) 0.0003s 2884.7 + 256 bits ecdh (brainpoolP256r1) 0.0003s 3066.5 + 384 bits ecdh (brainpoolP384r1) 0.0008s 1298.0 + 512 bits ecdh (brainpoolP512r1) 0.0014s 694.8 +``` + +Keys and signature can be serialized in different ways (see Usage, below). 
+For a NIST192p key, the three basic representations require strings of the
+following lengths (in bytes):
+
+    to_string:  signkey= 24, verifykey= 48, signature=48
+    compressed: signkey=n/a, verifykey= 25, signature=n/a
+    DER:        signkey=106, verifykey= 80, signature=55
+    PEM:        signkey=278, verifykey=162, (no support for PEM signatures)
+
+## History
+
+In 2006, Peter Pearson announced his pure-python implementation of ECDSA in a
+[message to sci.crypt][1], available from his [download site][2]. In 2010,
+Brian Warner wrote a wrapper around this code, to make it a bit easier and
+safer to use. In 2020, Hubert Kario included an implementation of elliptic
+curve cryptography that uses Jacobian coordinates internally, improving
+performance about 20-fold. You are looking at the README for this wrapper.
+
+[1]: http://www.derkeiler.com/Newsgroups/sci.crypt/2006-01/msg00651.html
+[2]: http://webpages.charter.net/curryfans/peter/downloads.html
+
+## Testing
+
+To run the full test suite, do this:
+
+    tox -e coverage
+
+On an Intel Core i7 4790K @ 4.0GHz, the tests take about 18 seconds to execute.
+The test suite uses
+[`hypothesis`](https://github.com/HypothesisWorks/hypothesis) so there is some
+inherent variability in the test suite execution time.
+
+One part of `test_pyecdsa.py` and `test_ecdh.py` checks compatibility with
+OpenSSL by running the "openssl" CLI tool; make sure it's in your `PATH` if
+you want to test compatibility with it (if OpenSSL is missing, too old, or
+doesn't support all the curves supported in upstream releases you will see
+skipped tests in the above `coverage` run).
+
+## Security
+
+This library was not designed with security in mind. If you are processing
+data that needs to be protected we suggest you use a quality wrapper around
+OpenSSL. [pyca/cryptography](https://cryptography.io) is one example of such
+a wrapper. The primary use-case of this library is as a portable library for
+interoperability testing and as a teaching tool.
+
+**This library does not protect against side-channel attacks.**
+
+Do not allow attackers to measure how long it takes you to generate a keypair
+or sign a message. Do not allow attackers to run code on the same physical
+machine when keypair generation or signing is taking place (this includes
+virtual machines). Do not allow attackers to measure how much power your
+computer uses while generating the keypair or signing a message. Do not allow
+attackers to measure RF interference coming from your computer while generating
+a keypair or signing a message. Note: just loading the private key will cause
+keypair generation. Other operations or attack vectors may also be
+vulnerable to attacks. **For a sophisticated attacker, observing just one
+operation with a private key will be sufficient to completely
+reconstruct the private key.**
+
+Please also note that any pure-Python cryptographic library will be vulnerable
+to the same side-channel attacks. This is because Python does not provide
+side-channel-secure primitives (with the exception of
+[`hmac.compare_digest()`][3]), making side-channel-secure programming
+impossible.
+
+This library depends upon a strong source of random numbers. Do not use it on
+a system where `os.urandom()` does not provide cryptographically secure
+random numbers.
+
+[3]: https://docs.python.org/3/library/hmac.html#hmac.compare_digest
+
+## Usage
+
+You start by creating a `SigningKey`. You can use this to sign data, by passing
+in data as a byte string and getting back the signature (also a byte string).
+You can also ask a `SigningKey` to give you the corresponding `VerifyingKey`.
+The `VerifyingKey` can be used to verify a signature, by passing it both the
+data string and the signature byte string: it either returns `True` or raises
+`BadSignatureError`.
+
+```python
+from ecdsa import SigningKey
+sk = SigningKey.generate() # uses NIST192p
+vk = sk.verifying_key
+signature = sk.sign(b"message")
+assert vk.verify(signature, b"message")
+```
+
+Each `SigningKey`/`VerifyingKey` is associated with a specific curve, like
+NIST192p (the default one). Longer curves are more secure, but take longer to
+use, and result in longer keys and signatures.
+
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+signature = sk.sign(b"message")
+assert vk.verify(signature, b"message")
+```
+
+The `SigningKey` can be serialized into several different formats: the shortest
+is to call `s=sk.to_string()`, and then re-create it with
+`SigningKey.from_string(s, curve)`. This short form does not record the
+curve, so you must be sure to pass to `from_string()` the same curve you used
+for the original key. The short form of a NIST192p-based signing key is just 24
+bytes long. If a point encoding is invalid or it does not lie on the specified
+curve, `from_string()` will raise `MalformedPointError`.
+
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+sk_string = sk.to_string()
+sk2 = SigningKey.from_string(sk_string, curve=NIST384p)
+print(sk_string.hex())
+print(sk2.to_string().hex())
+```
+
+Note: while the methods are called `to_string()`, the type they return is
+actually `bytes`; the "string" part is a leftover from Python 2.
+
+`sk.to_pem()` and `sk.to_der()` will serialize the signing key into the same
+formats that OpenSSL uses. The PEM file looks like the familiar ASCII-armored
+`"-----BEGIN EC PRIVATE KEY-----"` base64-encoded format, and the DER format
+is a shorter binary form of the same data.
+`SigningKey.from_pem()/.from_der()` will undo this serialization. These
+formats include the curve name, so you do not need to pass in a curve
+identifier to the deserializer. In case the file is malformed, `from_der()`
+and `from_pem()` will raise `UnexpectedDER` or `MalformedPointError`.
+
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+sk_pem = sk.to_pem()
+sk2 = SigningKey.from_pem(sk_pem)
+# sk and sk2 are the same key
+```
+
+Likewise, the `VerifyingKey` can be serialized in the same way:
+`vk.to_string()/VerifyingKey.from_string()`, `to_pem()/from_pem()`, and
+`to_der()/from_der()`. The same `curve=` argument is needed for
+`VerifyingKey.from_string()`.
+
+```python
+from ecdsa import SigningKey, VerifyingKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+vk_string = vk.to_string()
+vk2 = VerifyingKey.from_string(vk_string, curve=NIST384p)
+# vk and vk2 are the same key
+
+from ecdsa import SigningKey, VerifyingKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+vk_pem = vk.to_pem()
+vk2 = VerifyingKey.from_pem(vk_pem)
+# vk and vk2 are the same key
+```
+
+There are a couple of different ways to compute a signature. Fundamentally,
+ECDSA takes a number that represents the data being signed, and returns a
+pair of numbers that represent the signature.
The `hashfunc=` argument to
+`sk.sign()` and `vk.verify()` is used to turn an arbitrary string into a
+fixed-length digest, which is then turned into a number that ECDSA can sign,
+and both sign and verify must use the same approach. The default value is
+`hashlib.sha1`, but if you use NIST256p or a longer curve, you can use
+`hashlib.sha256` instead.
+
+There are also multiple ways to represent a signature. The default
+`sk.sign()` and `vk.verify()` methods present it as a short string, for
+simplicity and minimal overhead. To use a different scheme, use the
+`sk.sign(sigencode=)` and `vk.verify(sigdecode=)` arguments. There are helper
+functions in the `ecdsa.util` module that can be useful here.
+
+It is also possible to create a `SigningKey` from a "seed", which is
+deterministic. This can be used in protocols where you want to derive
+consistent signing keys from some other secret, for example when you want
+three separate keys and only want to store a single master secret. You should
+start with a uniformly-distributed unguessable seed with about `curve.baselen`
+bytes of entropy, then use one of the helper functions in `ecdsa.util` to
+convert it into an integer in the correct range, and finally pass it
+into `SigningKey.from_secret_exponent()`, like this:
+
+```python
+import os
+from ecdsa import NIST384p, SigningKey
+from ecdsa.util import randrange_from_seed__trytryagain
+
+def make_key(seed):
+    secexp = randrange_from_seed__trytryagain(seed, NIST384p.order)
+    return SigningKey.from_secret_exponent(secexp, curve=NIST384p)
+
+seed = os.urandom(NIST384p.baselen) # or other starting point
+sk1a = make_key(seed)
+sk1b = make_key(seed)
+# note: sk1a and sk1b are the same key
+assert sk1a.to_string() == sk1b.to_string()
+sk2 = make_key(b"2-"+seed) # different key
+assert sk1a.to_string() != sk2.to_string()
+```
+
+If the application will verify a lot of signatures made with a single
+key, it's possible to precompute some of the internal values to make
+signature verification significantly faster. The break-even point occurs at
+about 100 signatures verified.
+
+To perform precomputation, you can call the `precompute()` method
+on a `VerifyingKey` instance:
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+vk.precompute()
+signature = sk.sign(b"message")
+assert vk.verify(signature, b"message")
+```
+
+Once `precompute()` has been called, all signature verifications with this key
+will be faster to execute.
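+As a quick sketch of how the `hashfunc=` and `sigencode=`/`sigdecode=`
+arguments combine (the curve, message, and choice of the
+`sigencode_der`/`sigdecode_der` helpers here are arbitrary examples):
+
+```python
+import hashlib
+from ecdsa import SigningKey, NIST256p
+from ecdsa.util import sigencode_der, sigdecode_der
+
+sk = SigningKey.generate(curve=NIST256p)
+vk = sk.verifying_key
+
+# Sign with SHA-256 instead of the default SHA-1, and emit a DER signature.
+signature = sk.sign(b"message", hashfunc=hashlib.sha256,
+                    sigencode=sigencode_der)
+
+# Verification must use the same hash function and signature decoding.
+assert vk.verify(signature, b"message", hashfunc=hashlib.sha256,
+                 sigdecode=sigdecode_der)
+```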
+
+## OpenSSL Compatibility
+
+To produce signatures that can be verified by OpenSSL tools, or to verify
+signatures that were produced by those tools, use:
+
+```python
+# openssl ecparam -name prime256v1 -genkey -out sk.pem
+# openssl ec -in sk.pem -pubout -out vk.pem
+# echo "data for signing" > data
+# openssl dgst -sha256 -sign sk.pem -out data.sig data
+# openssl dgst -sha256 -verify vk.pem -signature data.sig data
+# openssl dgst -sha256 -prverify sk.pem -signature data.sig data
+
+import hashlib
+from ecdsa import SigningKey, VerifyingKey
+from ecdsa.util import sigencode_der, sigdecode_der
+
+with open("vk.pem") as f:
+    vk = VerifyingKey.from_pem(f.read())
+
+with open("data", "rb") as f:
+    data = f.read()
+
+with open("data.sig", "rb") as f:
+    signature = f.read()
+
+assert vk.verify(signature, data, hashlib.sha256, sigdecode=sigdecode_der)
+
+with open("sk.pem") as f:
+    sk = SigningKey.from_pem(f.read(), hashlib.sha256)
+
+new_signature = sk.sign_deterministic(data, sigencode=sigencode_der)
+
+with open("data.sig2", "wb") as f:
+    f.write(new_signature)
+
+# openssl dgst -sha256 -verify vk.pem -signature data.sig2 data
+```
+
+Note: if compatibility with OpenSSL 1.0.0 or earlier is necessary, the
+`sigencode_string` and `sigdecode_string` from `ecdsa.util` can be used for
+writing and reading the signatures, respectively.
+
+The keys can also be written in a format that OpenSSL can handle:
+
+```python
+from ecdsa import SigningKey, VerifyingKey
+
+with open("sk.pem") as f:
+    sk = SigningKey.from_pem(f.read())
+with open("sk.pem", "wb") as f:
+    f.write(sk.to_pem())
+
+with open("vk.pem") as f:
+    vk = VerifyingKey.from_pem(f.read())
+with open("vk.pem", "wb") as f:
+    f.write(vk.to_pem())
+```
+
+## Entropy
+
+Creating a signing key with `SigningKey.generate()` requires some form of
+entropy (as opposed to
+`from_secret_exponent`/`from_string`/`from_der`/`from_pem`,
+which are deterministic and do not require an entropy source). The default
+source is `os.urandom()`, but you can pass any other function that behaves
+like `os.urandom` as the `entropy=` argument to do something different. This
+may be useful in unit tests, where you want to achieve repeatable results. The
+`ecdsa.util.PRNG` utility is handy here: it takes a seed and produces a strong
+pseudo-random stream from it:
+
+```python
+from ecdsa.util import PRNG
+from ecdsa import SigningKey
+rng1 = PRNG(b"seed")
+sk1 = SigningKey.generate(entropy=rng1)
+rng2 = PRNG(b"seed")
+sk2 = SigningKey.generate(entropy=rng2)
+# sk1 and sk2 are the same key
+```
+
+Likewise, ECDSA signature generation requires a random number, and each
+signature must use a different one (using the same number twice will
+immediately reveal the private signing key). The `sk.sign()` method takes an
+`entropy=` argument which behaves the same as `SigningKey.generate(entropy=)`.
+
+## Deterministic Signatures
+
+If you call `SigningKey.sign_deterministic(data)` instead of `.sign(data)`,
+the code will generate a deterministic signature instead of a random one.
+This uses the algorithm from RFC 6979 to safely generate a unique `k` value,
+derived from the private key and the message being signed. Each time you sign
+the same message with the same key, you will get the same signature (using
+the same `k`).
+
+This may become the default in a future version, as it is not vulnerable to
+failures of the entropy source.
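+As a minimal sketch of the determinism guarantee (the curve and messages
+below are arbitrary examples):
+
+```python
+import hashlib
+from ecdsa import SigningKey, NIST256p
+
+sk = SigningKey.generate(curve=NIST256p)
+
+# RFC 6979 derives k from the private key and the message digest, so
+# signing the same message twice yields byte-identical signatures.
+sig1 = sk.sign_deterministic(b"message", hashfunc=hashlib.sha256)
+sig2 = sk.sign_deterministic(b"message", hashfunc=hashlib.sha256)
+assert sig1 == sig2
+
+# A different message produces a different k, hence a different signature.
+assert sk.sign_deterministic(b"other", hashfunc=hashlib.sha256) != sig1
+```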
+
+## Examples
+
+Create a NIST192p keypair and immediately save both to disk:
+
+```python
+from ecdsa import SigningKey
+sk = SigningKey.generate()
+vk = sk.verifying_key
+with open("private.pem", "wb") as f:
+    f.write(sk.to_pem())
+with open("public.pem", "wb") as f:
+    f.write(vk.to_pem())
+```
+
+Load a signing key from disk, use it to sign a message (using SHA-1), and write
+the signature to disk:
+
+```python
+from ecdsa import SigningKey
+with open("private.pem") as f:
+    sk = SigningKey.from_pem(f.read())
+with open("message", "rb") as f:
+    message = f.read()
+sig = sk.sign(message)
+with open("signature", "wb") as f:
+    f.write(sig)
+```
+
+Load the verifying key, message, and signature from disk, and verify the
+signature (assume SHA-1 hash):
+
+```python
+from ecdsa import VerifyingKey, BadSignatureError
+vk = VerifyingKey.from_pem(open("public.pem").read())
+with open("message", "rb") as f:
+    message = f.read()
+with open("signature", "rb") as f:
+    sig = f.read()
+try:
+    vk.verify(sig, message)
+    print("good signature")
+except BadSignatureError:
+    print("BAD SIGNATURE")
+```
+
+Create a NIST521p keypair:
+
+```python
+from ecdsa import SigningKey, NIST521p
+sk = SigningKey.generate(curve=NIST521p)
+vk = sk.verifying_key
+```
+
+Create three independent signing keys from a master seed:
+
+```python
+import os
+from ecdsa import NIST192p, SigningKey
+from ecdsa.util import randrange_from_seed__trytryagain
+
+def make_key_from_seed(seed, curve=NIST192p):
+    secexp = randrange_from_seed__trytryagain(seed, curve.order)
+    return SigningKey.from_secret_exponent(secexp, curve)
+
+seed = os.urandom(NIST192p.baselen)  # the master secret
+sk1 = make_key_from_seed(b"1:" + seed)
+sk2 = make_key_from_seed(b"2:" + seed)
+sk3 = make_key_from_seed(b"3:" + seed)
+```
+
+Load a verifying key from disk and print it using hex encoding in
+uncompressed and compressed format (defined in X9.62 and SEC1 standards):
+
+```python
+from ecdsa import VerifyingKey
+
+with open("public.pem") as f:
+    vk = VerifyingKey.from_pem(f.read())
+
+print("uncompressed: {0}".format(vk.to_string("uncompressed").hex()))
+print("compressed: {0}".format(vk.to_string("compressed").hex()))
+```
+
+Load a verifying key from a hex string in compressed format, output
+uncompressed:
+
+```python
+from ecdsa import VerifyingKey, NIST256p
+
+comp_str = '022799c0d0ee09772fdd337d4f28dc155581951d07082fb19a38aa396b67e77759'
+vk = VerifyingKey.from_string(bytearray.fromhex(comp_str), curve=NIST256p)
+print(vk.to_string("uncompressed").hex())
+```
+
+ECDH key exchange with a remote party:
+
+```python
+from ecdsa import ECDH, NIST256p
+
+ecdh = ECDH(curve=NIST256p)
+ecdh.generate_private_key()
+local_public_key = ecdh.get_public_key()
+# send `local_public_key` to the remote party and receive
+# `remote_public_key` from the remote party
+with open("remote_public_key.pem") as e:
+    remote_public_key = e.read()
+ecdh.load_received_public_key_pem(remote_public_key)
+secret = ecdh.generate_sharedsecret_bytes()
+```
+
+
diff --git a/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/RECORD
new file mode 100644
index 0000000..1185866
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/RECORD
@@ -0,0 +1,56 @@
+ecdsa-0.17.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ecdsa-0.17.0.dist-info/LICENSE,sha256=PsqYRXc9LluMydjBGdNF8ApIBuS9Zg1KPWzfnA6di7I,1147
+ecdsa-0.17.0.dist-info/METADATA,sha256=QgdkMo-Z0HsjHuFRckTK5NZZx6_yJ6eBpH_z3oG4ef4,28533
+ecdsa-0.17.0.dist-info/RECORD,,
+ecdsa-0.17.0.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 +ecdsa-0.17.0.dist-info/top_level.txt,sha256=7ovPHfAPyTou19f8gOSbHm6B9dGjTibWolcCB7Zjovs,6 +ecdsa/__init__.py,sha256=1h7cRMgiNoqlMYJ5d0Af9_BKkGww3uuLrOUmqz7JBF0,1610 +ecdsa/__pycache__/__init__.cpython-310.pyc,, +ecdsa/__pycache__/_compat.cpython-310.pyc,, +ecdsa/__pycache__/_rwlock.cpython-310.pyc,, +ecdsa/__pycache__/_version.cpython-310.pyc,, +ecdsa/__pycache__/curves.cpython-310.pyc,, +ecdsa/__pycache__/der.cpython-310.pyc,, +ecdsa/__pycache__/ecdh.cpython-310.pyc,, +ecdsa/__pycache__/ecdsa.cpython-310.pyc,, +ecdsa/__pycache__/ellipticcurve.cpython-310.pyc,, +ecdsa/__pycache__/errors.cpython-310.pyc,, +ecdsa/__pycache__/keys.cpython-310.pyc,, +ecdsa/__pycache__/numbertheory.cpython-310.pyc,, +ecdsa/__pycache__/rfc6979.cpython-310.pyc,, +ecdsa/__pycache__/test_curves.cpython-310.pyc,, +ecdsa/__pycache__/test_der.cpython-310.pyc,, +ecdsa/__pycache__/test_ecdh.cpython-310.pyc,, +ecdsa/__pycache__/test_ecdsa.cpython-310.pyc,, +ecdsa/__pycache__/test_ellipticcurve.cpython-310.pyc,, +ecdsa/__pycache__/test_jacobi.cpython-310.pyc,, +ecdsa/__pycache__/test_keys.cpython-310.pyc,, +ecdsa/__pycache__/test_malformed_sigs.cpython-310.pyc,, +ecdsa/__pycache__/test_numbertheory.cpython-310.pyc,, +ecdsa/__pycache__/test_pyecdsa.cpython-310.pyc,, +ecdsa/__pycache__/test_rw_lock.cpython-310.pyc,, +ecdsa/__pycache__/util.cpython-310.pyc,, +ecdsa/_compat.py,sha256=yTg9_CfiuVSuIMZm934gQWOLWtagQCfhDex8PHdfCl8,1774 +ecdsa/_rwlock.py,sha256=CAwHp2V65ksI8B1UqY7EccK9LaUToiv6pDLVzm44eag,2849 +ecdsa/_version.py,sha256=dXVVsGiAz_a3g_MBS1p06Bx1HEoOj5vKRUkoJhVfxL8,498 +ecdsa/curves.py,sha256=3I0u0r-lCZgvN156oo6fSxCjNQYFSTyVjLSA1JYfgJs,12266 +ecdsa/der.py,sha256=p_6j-998zGguHdgf4LFWAeXLpvU7q57tmECiN8YviKU,14113 +ecdsa/ecdh.py,sha256=1DksrUSHuJdtlH9ut1FcU3OeeD8n-AMq1zpu7L1D4cw,11069 +ecdsa/ecdsa.py,sha256=vHqBojVg5BW526wYFmW-ORwSHL_93lzOUuEbhSK4pbs,24283 +ecdsa/ellipticcurve.py,sha256=yMyfhpJan3WAQPVc_37UkYeRs7GrirXNT5D08zRVmx4,39435 +ecdsa/errors.py,sha256=b4mhnmIpRnEdHzbectHAA5F7O9MtSaI-fYoc13_vBxQ,130 +ecdsa/keys.py,sha256=9eU2BNbpnKrLx8fEX7qQRPN1k_siyUVllw8tJOrvbOY,60650 +ecdsa/numbertheory.py,sha256=rDXcLU8RO9TcTVV3a8nRvBvmxvjl97mtEnPK6fhZ0Bg,17349 +ecdsa/rfc6979.py,sha256=x6dCO2l3QlPLHE-w3c456rqthhSBP03dCnTfSweBgBU,2725 +ecdsa/test_curves.py,sha256=rrKpV-bxsPwgYv_s1lUmIY9DLoIbzQkha9gCm1NQ9EU,11670 +ecdsa/test_der.py,sha256=Y2sfC4pAYds-pli8-HYZ6f9cnPrOGAh-aJtzkxWPWdQ,14958 +ecdsa/test_ecdh.py,sha256=6EHT9QR9BAuzlv6OgwrYsgX-TQCGeaqWzwfBGIuizyA,14965 +ecdsa/test_ecdsa.py,sha256=1clBfDtA0zdF-13BoKXsJffL5K-iFutlMgov0kmGro0,23923 +ecdsa/test_ellipticcurve.py,sha256=K6W_EQunOfE-RVSux6d1O7LXzSuAsVk9F1elwyY-rYA,6085 +ecdsa/test_jacobi.py,sha256=BvWEpf7jugXfAOV2jI65G12_ZT3TmRmN0SDDt6eW_xc,18404 +ecdsa/test_keys.py,sha256=tlPIoGGQI4bB_qHZnRgFqjCgSeZeWkQggy90YwWBaA0,22423 +ecdsa/test_malformed_sigs.py,sha256=1vHzX0EAeGh7XYxY-glKVMC2XnZba-DGUCh7U09eJao,10262 +ecdsa/test_numbertheory.py,sha256=f5TEMDQ-4gcrTiuYCrlKv27yeZ-qgRnvxPKv57AGIT4,10881 +ecdsa/test_pyecdsa.py,sha256=5PIfzm_blRjutrt4YYqybll2XAATWr4vcKA4-EANv88,79320 +ecdsa/test_rw_lock.py,sha256=byv0_FTM90cbuHPCI6__LeQJkHL_zYEeVYIBO8e2LLc,7021 +ecdsa/util.py,sha256=Bhr8WH0I_Au1fvHsxElBJwClhHJla9Ca2mmN25i-feE,14620 diff --git a/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/WHEEL new file mode 100644 index 0000000..8b701e9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/WHEEL @@ -0,0 
+1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.6) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/top_level.txt new file mode 100644 index 0000000..aa5efdb --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa-0.17.0.dist-info/top_level.txt @@ -0,0 +1 @@ +ecdsa diff --git a/venv/lib/python3.10/site-packages/ecdsa/__init__.py b/venv/lib/python3.10/site-packages/ecdsa/__init__.py new file mode 100644 index 0000000..08adeee --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/__init__.py @@ -0,0 +1,88 @@ +# while we don't use six in this file, we did bundle it for a long time, so +# keep as part of module in a virtual way (through __all__) +import six +from .keys import ( + SigningKey, + VerifyingKey, + BadSignatureError, + BadDigestError, + MalformedPointError, +) +from .curves import ( + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + SECP256k1, + BRAINPOOLP160r1, + BRAINPOOLP192r1, + BRAINPOOLP224r1, + BRAINPOOLP256r1, + BRAINPOOLP320r1, + BRAINPOOLP384r1, + BRAINPOOLP512r1, + SECP112r1, + SECP112r2, + SECP128r1, + SECP160r1, +) +from .ecdh import ( + ECDH, + NoKeyError, + NoCurveError, + InvalidCurveError, + InvalidSharedSecretError, +) +from .der import UnexpectedDER + +# This code comes from http://github.com/tlsfuzzer/python-ecdsa +from ._version import get_versions + +__version__ = get_versions()["version"] +del get_versions + +__all__ = [ + "curves", + "der", + "ecdsa", + "ellipticcurve", + "keys", + "numbertheory", + "test_pyecdsa", + "util", + "six", +] + +_hush_pyflakes = [ + SigningKey, + VerifyingKey, + BadSignatureError, + BadDigestError, + MalformedPointError, + UnexpectedDER, + InvalidCurveError, + NoKeyError, + InvalidSharedSecretError, + ECDH, + NoCurveError, + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + SECP256k1, + BRAINPOOLP160r1, + BRAINPOOLP192r1, + BRAINPOOLP224r1, + BRAINPOOLP256r1, + BRAINPOOLP320r1, + BRAINPOOLP384r1, + BRAINPOOLP512r1, + SECP112r1, + SECP112r2, + SECP128r1, + SECP160r1, + six.b(""), +] +del _hush_pyflakes diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..b8b6ab2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/_compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/_compat.cpython-310.pyc new file mode 100644 index 0000000..1313134 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/_compat.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/_rwlock.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/_rwlock.cpython-310.pyc new file mode 100644 index 0000000..990d7dd Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/_rwlock.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/_version.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/_version.cpython-310.pyc new file mode 100644 index 0000000..c3aef99 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/_version.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/curves.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/curves.cpython-310.pyc new file mode 100644 index 0000000..a3ac586 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/curves.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/der.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/der.cpython-310.pyc new file mode 100644 index 0000000..b85225a Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/der.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/ecdh.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/ecdh.cpython-310.pyc new file mode 100644 index 0000000..2af7fd1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/ecdh.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/ecdsa.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/ecdsa.cpython-310.pyc new file mode 100644 index 0000000..5e00e87 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/ecdsa.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/ellipticcurve.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/ellipticcurve.cpython-310.pyc new file mode 100644 index 0000000..0bd9df9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/ellipticcurve.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/errors.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/errors.cpython-310.pyc new file mode 100644 index 0000000..06c132a Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/errors.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/keys.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/keys.cpython-310.pyc new file mode 100644 index 0000000..639a51f Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/keys.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/numbertheory.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/numbertheory.cpython-310.pyc new file mode 100644 index 0000000..1fc404e Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/numbertheory.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/rfc6979.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/rfc6979.cpython-310.pyc new file mode 100644 index 0000000..0f2e8ff Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/rfc6979.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_curves.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_curves.cpython-310.pyc new file mode 100644 index 0000000..4f33266 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_curves.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_der.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_der.cpython-310.pyc new file mode 100644 index 0000000..b28f4c6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_der.cpython-310.pyc differ diff 
--git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_ecdh.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_ecdh.cpython-310.pyc new file mode 100644 index 0000000..2dbc985 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_ecdh.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_ecdsa.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_ecdsa.cpython-310.pyc new file mode 100644 index 0000000..2097124 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_ecdsa.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_ellipticcurve.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_ellipticcurve.cpython-310.pyc new file mode 100644 index 0000000..b979d6b Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_ellipticcurve.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_jacobi.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_jacobi.cpython-310.pyc new file mode 100644 index 0000000..2b5f3e4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_jacobi.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_keys.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_keys.cpython-310.pyc new file mode 100644 index 0000000..6dbf6ff Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_keys.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_malformed_sigs.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_malformed_sigs.cpython-310.pyc new file mode 100644 index 0000000..65fbb8e Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_malformed_sigs.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_numbertheory.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_numbertheory.cpython-310.pyc new file mode 100644 index 0000000..002c027 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_numbertheory.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_pyecdsa.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_pyecdsa.cpython-310.pyc new file mode 100644 index 0000000..7eaefe2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_pyecdsa.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_rw_lock.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_rw_lock.cpython-310.pyc new file mode 100644 index 0000000..6e61632 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/test_rw_lock.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/__pycache__/util.cpython-310.pyc b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000..22a2e6b Binary files /dev/null and b/venv/lib/python3.10/site-packages/ecdsa/__pycache__/util.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ecdsa/_compat.py b/venv/lib/python3.10/site-packages/ecdsa/_compat.py new file mode 100644 index 0000000..d773e75 --- /dev/null 
+++ b/venv/lib/python3.10/site-packages/ecdsa/_compat.py @@ -0,0 +1,64 @@ +""" +Common functions for providing cross-python version compatibility. +""" +import sys +import re +from six import integer_types + + +def str_idx_as_int(string, index): + """Take index'th byte from string, return as integer""" + val = string[index] + if isinstance(val, integer_types): + return val + return ord(val) + + +if sys.version_info < (3, 0): # pragma: no branch + + def normalise_bytes(buffer_object): + """Cast the input into array of bytes.""" + # flake8 runs on py3 where `buffer` indeed doesn't exist... + return buffer(buffer_object) # noqa: F821 + + def hmac_compat(ret): + return ret + + if sys.version_info < (2, 7) or sys.version_info < ( # pragma: no branch + 2, + 7, + 4, + ): + + def remove_whitespace(text): + """Removes all whitespace from passed in string""" + return re.sub(r"\s+", "", text) + + else: + + def remove_whitespace(text): + """Removes all whitespace from passed in string""" + return re.sub(r"\s+", "", text, flags=re.UNICODE) + + +else: + if sys.version_info < (3, 4): # pragma: no branch + # on python 3.3 hmac.hmac.update() accepts only bytes, on newer + # versions it does accept memoryview() also + def hmac_compat(data): + if not isinstance(data, bytes): # pragma: no branch + return bytes(data) + return data + + else: + + def hmac_compat(data): + return data + + def normalise_bytes(buffer_object): + """Cast the input into array of bytes.""" + return memoryview(buffer_object).cast("B") + + def remove_whitespace(text): + """Removes all whitespace from passed in string""" + return re.sub(r"\s+", "", text, flags=re.UNICODE) diff --git a/venv/lib/python3.10/site-packages/ecdsa/_rwlock.py b/venv/lib/python3.10/site-packages/ecdsa/_rwlock.py new file mode 100644 index 0000000..010e498 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/_rwlock.py @@ -0,0 +1,86 @@ +# Copyright Mateusz Kobos, (c) 2011 +# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/ +# released under the MIT licence + +import threading + + +__author__ = "Mateusz Kobos" + + +class RWLock: + """ + Read-Write locking primitive + + Synchronization object used in a solution of so-called second + readers-writers problem. In this problem, many readers can simultaneously + access a share, and a writer has an exclusive access to this share. + Additionally, the following constraints should be met: + 1) no reader should be kept waiting if the share is currently opened for + reading unless a writer is also waiting for the share, + 2) no writer should be kept waiting for the share longer than absolutely + necessary. + + The implementation is based on [1, secs. 4.2.2, 4.2.6, 4.2.7] + with a modification -- adding an additional lock (C{self.__readers_queue}) + -- in accordance with [2]. + + Sources: + [1] A.B. Downey: "The little book of semaphores", Version 2.1.5, 2008 + [2] P.J. Courtois, F. Heymans, D.L. Parnas: + "Concurrent Control with 'Readers' and 'Writers'", + Communications of the ACM, 1971 (via [3]) + [3] http://en.wikipedia.org/wiki/Readers-writers_problem + """ + + def __init__(self): + """ + A lock giving an even higher priority to the writer in certain + cases (see [2] for a discussion). 
+ """ + self.__read_switch = _LightSwitch() + self.__write_switch = _LightSwitch() + self.__no_readers = threading.Lock() + self.__no_writers = threading.Lock() + self.__readers_queue = threading.Lock() + + def reader_acquire(self): + self.__readers_queue.acquire() + self.__no_readers.acquire() + self.__read_switch.acquire(self.__no_writers) + self.__no_readers.release() + self.__readers_queue.release() + + def reader_release(self): + self.__read_switch.release(self.__no_writers) + + def writer_acquire(self): + self.__write_switch.acquire(self.__no_readers) + self.__no_writers.acquire() + + def writer_release(self): + self.__no_writers.release() + self.__write_switch.release(self.__no_readers) + + +class _LightSwitch: + """An auxiliary "light switch"-like object. The first thread turns on the + "switch", the last one turns it off (see [1, sec. 4.2.2] for details).""" + + def __init__(self): + self.__counter = 0 + self.__mutex = threading.Lock() + + def acquire(self, lock): + self.__mutex.acquire() + self.__counter += 1 + if self.__counter == 1: + lock.acquire() + self.__mutex.release() + + def release(self, lock): + self.__mutex.acquire() + self.__counter -= 1 + if self.__counter == 0: + lock.release() + self.__mutex.release() diff --git a/venv/lib/python3.10/site-packages/ecdsa/_version.py b/venv/lib/python3.10/site-packages/ecdsa/_version.py new file mode 100644 index 0000000..bc85519 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/_version.py @@ -0,0 +1,21 @@ + +# This file was generated by 'versioneer.py' (0.17) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +import json + +version_json = ''' +{ + "date": "2021-05-27T20:07:15+0200", + "dirty": false, + "error": null, + "full-revisionid": "5aa87c52c25a476f691b570be3577ff71cd01982", + "version": "0.17.0" +} +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) diff --git a/venv/lib/python3.10/site-packages/ecdsa/curves.py b/venv/lib/python3.10/site-packages/ecdsa/curves.py new file mode 100644 index 0000000..7a4a530 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/curves.py @@ -0,0 +1,440 @@ +from __future__ import division + +from six import PY2 +from . 
import der, ecdsa, ellipticcurve +from .util import orderlen, number_to_string, string_to_number +from ._compat import normalise_bytes + + +# orderlen was defined in this module previously, so keep it in __all__, +# will need to mark it as deprecated later +__all__ = [ + "UnknownCurveError", + "orderlen", + "Curve", + "SECP112r1", + "SECP112r2", + "SECP128r1", + "SECP160r1", + "NIST192p", + "NIST224p", + "NIST256p", + "NIST384p", + "NIST521p", + "curves", + "find_curve", + "SECP256k1", + "BRAINPOOLP160r1", + "BRAINPOOLP192r1", + "BRAINPOOLP224r1", + "BRAINPOOLP256r1", + "BRAINPOOLP320r1", + "BRAINPOOLP384r1", + "BRAINPOOLP512r1", + "PRIME_FIELD_OID", + "CHARACTERISTIC_TWO_FIELD_OID", +] + + +PRIME_FIELD_OID = (1, 2, 840, 10045, 1, 1) +CHARACTERISTIC_TWO_FIELD_OID = (1, 2, 840, 10045, 1, 2) + + +class UnknownCurveError(Exception): + pass + + +class Curve: + def __init__(self, name, curve, generator, oid, openssl_name=None): + self.name = name + self.openssl_name = openssl_name # maybe None + self.curve = curve + self.generator = generator + self.order = generator.order() + self.baselen = orderlen(self.order) + self.verifying_key_length = 2 * orderlen(curve.p()) + self.signature_length = 2 * self.baselen + self.oid = oid + if oid: + self.encoded_oid = der.encode_oid(*oid) + + def __eq__(self, other): + if isinstance(other, Curve): + return ( + self.curve == other.curve and self.generator == other.generator + ) + return NotImplemented + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return self.name + + def to_der(self, encoding=None, point_encoding="uncompressed"): + """Serialise the curve parameters to binary string. + + :param str encoding: the format to save the curve parameters in. + Default is ``named_curve``, with fallback being the ``explicit`` + if the OID is not set for the curve. + :param str point_encoding: the point encoding of the generator when + explicit curve encoding is used. Ignored for ``named_curve`` + format. + + :return: DER encoded ECParameters structure + :rtype: bytes + """ + if encoding is None: + if self.oid: + encoding = "named_curve" + else: + encoding = "explicit" + + if encoding == "named_curve": + if not self.oid: + raise UnknownCurveError( + "Can't encode curve using named_curve encoding without " + "associated curve OID" + ) + return der.encode_oid(*self.oid) + + # encode the ECParameters sequence + curve_p = self.curve.p() + version = der.encode_integer(1) + field_id = der.encode_sequence( + der.encode_oid(*PRIME_FIELD_OID), der.encode_integer(curve_p) + ) + curve = der.encode_sequence( + der.encode_octet_string( + number_to_string(self.curve.a() % curve_p, curve_p) + ), + der.encode_octet_string( + number_to_string(self.curve.b() % curve_p, curve_p) + ), + ) + base = der.encode_octet_string(self.generator.to_bytes(point_encoding)) + order = der.encode_integer(self.generator.order()) + seq_elements = [version, field_id, curve, base, order] + if self.curve.cofactor(): + cofactor = der.encode_integer(self.curve.cofactor()) + seq_elements.append(cofactor) + + return der.encode_sequence(*seq_elements) + + def to_pem(self, encoding=None, point_encoding="uncompressed"): + """ + Serialise the curve parameters to the :term:`PEM` format. + + :param str encoding: the format to save the curve parameters in. + Default is ``named_curve``, with fallback being the ``explicit`` + if the OID is not set for the curve. + :param str point_encoding: the point encoding of the generator when + explicit curve encoding is used. 
Ignored for ``named_curve`` + format. + + :return: PEM encoded ECParameters structure + :rtype: str + """ + return der.topem( + self.to_der(encoding, point_encoding), "EC PARAMETERS" + ) + + @staticmethod + def from_der(data, valid_encodings=None): + """Decode the curve parameters from DER file. + + :param data: the binary string to decode the parameters from + :type data: :term:`bytes-like object` + :param valid_encodings: set of names of allowed encodings, by default + all (set by passing ``None``), supported ones are ``named_curve`` + and ``explicit`` + :type valid_encodings: :term:`set-like object` + """ + if not valid_encodings: + valid_encodings = set(("named_curve", "explicit")) + if not all(i in ["named_curve", "explicit"] for i in valid_encodings): + raise ValueError( + "Only named_curve and explicit encodings supported" + ) + data = normalise_bytes(data) + if not der.is_sequence(data): + if "named_curve" not in valid_encodings: + raise der.UnexpectedDER( + "named_curve curve parameters not allowed" + ) + oid, empty = der.remove_object(data) + if empty: + raise der.UnexpectedDER("Unexpected data after OID") + return find_curve(oid) + + if "explicit" not in valid_encodings: + raise der.UnexpectedDER("explicit curve parameters not allowed") + + seq, empty = der.remove_sequence(data) + if empty: + raise der.UnexpectedDER( + "Unexpected data after ECParameters structure" + ) + # decode the ECParameters sequence + version, rest = der.remove_integer(seq) + if version != 1: + raise der.UnexpectedDER("Unknown parameter encoding format") + field_id, rest = der.remove_sequence(rest) + curve, rest = der.remove_sequence(rest) + base_bytes, rest = der.remove_octet_string(rest) + order, rest = der.remove_integer(rest) + cofactor = None + if rest: + # the ASN.1 specification of ECParameters allows for future + # extensions of the sequence, so ignore the remaining bytes + cofactor, _ = der.remove_integer(rest) + + # decode the ECParameters.fieldID sequence + field_type, rest = der.remove_object(field_id) + if field_type == CHARACTERISTIC_TWO_FIELD_OID: + raise UnknownCurveError("Characteristic 2 curves unsupported") + if field_type != PRIME_FIELD_OID: + raise UnknownCurveError( + "Unknown field type: {0}".format(field_type) + ) + prime, empty = der.remove_integer(rest) + if empty: + raise der.UnexpectedDER( + "Unexpected data after ECParameters.fieldID.Prime-p element" + ) + + # decode the ECParameters.curve sequence + curve_a_bytes, rest = der.remove_octet_string(curve) + curve_b_bytes, rest = der.remove_octet_string(rest) + # seed can be defined here, but we don't parse it, so ignore `rest` + + curve_a = string_to_number(curve_a_bytes) + curve_b = string_to_number(curve_b_bytes) + + curve_fp = ellipticcurve.CurveFp(prime, curve_a, curve_b, cofactor) + + # decode the ECParameters.base point + + base = ellipticcurve.PointJacobi.from_bytes( + curve_fp, + base_bytes, + valid_encodings=("uncompressed", "compressed", "hybrid"), + order=order, + generator=True, + ) + tmp_curve = Curve("unknown", curve_fp, base, None) + + # if the curve matches one of the well-known ones, use the well-known + # one in preference, as it will have the OID and name associated + for i in curves: + if tmp_curve == i: + return i + return tmp_curve + + @classmethod + def from_pem(cls, string, valid_encodings=None): + """Decode the curve parameters from PEM file. 
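        A round-trip sketch (illustrative; ``to_pem()`` of a well-known
        curve uses the named_curve encoding, so decoding finds and returns
        the library's own Curve object):

            from ecdsa.curves import Curve, NIST256p

            pem = NIST256p.to_pem()   # "-----BEGIN EC PARAMETERS-----" ...
            assert Curve.from_pem(pem) is NIST256p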
+ + :param str string: the text string to decode the parameters from + :param valid_encodings: set of names of allowed encodings, by default + all (set by passing ``None``), supported ones are ``named_curve`` + and ``explicit`` + :type valid_encodings: :term:`set-like object` + """ + if not PY2 and isinstance(string, str): # pragma: no branch + string = string.encode() + + ec_param_index = string.find(b"-----BEGIN EC PARAMETERS-----") + if ec_param_index == -1: + raise der.UnexpectedDER("EC PARAMETERS PEM header not found") + + return cls.from_der( + der.unpem(string[ec_param_index:]), valid_encodings + ) + + +# the SEC curves +SECP112r1 = Curve( + "SECP112r1", + ecdsa.curve_112r1, + ecdsa.generator_112r1, + (1, 3, 132, 0, 6), + "secp112r1", +) + + +SECP112r2 = Curve( + "SECP112r2", + ecdsa.curve_112r2, + ecdsa.generator_112r2, + (1, 3, 132, 0, 7), + "secp112r2", +) + + +SECP128r1 = Curve( + "SECP128r1", + ecdsa.curve_128r1, + ecdsa.generator_128r1, + (1, 3, 132, 0, 28), + "secp128r1", +) + + +SECP160r1 = Curve( + "SECP160r1", + ecdsa.curve_160r1, + ecdsa.generator_160r1, + (1, 3, 132, 0, 8), + "secp160r1", +) + + +# the NIST curves +NIST192p = Curve( + "NIST192p", + ecdsa.curve_192, + ecdsa.generator_192, + (1, 2, 840, 10045, 3, 1, 1), + "prime192v1", +) + + +NIST224p = Curve( + "NIST224p", + ecdsa.curve_224, + ecdsa.generator_224, + (1, 3, 132, 0, 33), + "secp224r1", +) + + +NIST256p = Curve( + "NIST256p", + ecdsa.curve_256, + ecdsa.generator_256, + (1, 2, 840, 10045, 3, 1, 7), + "prime256v1", +) + + +NIST384p = Curve( + "NIST384p", + ecdsa.curve_384, + ecdsa.generator_384, + (1, 3, 132, 0, 34), + "secp384r1", +) + + +NIST521p = Curve( + "NIST521p", + ecdsa.curve_521, + ecdsa.generator_521, + (1, 3, 132, 0, 35), + "secp521r1", +) + + +SECP256k1 = Curve( + "SECP256k1", + ecdsa.curve_secp256k1, + ecdsa.generator_secp256k1, + (1, 3, 132, 0, 10), + "secp256k1", +) + + +BRAINPOOLP160r1 = Curve( + "BRAINPOOLP160r1", + ecdsa.curve_brainpoolp160r1, + ecdsa.generator_brainpoolp160r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 1), + "brainpoolP160r1", +) + + +BRAINPOOLP192r1 = Curve( + "BRAINPOOLP192r1", + ecdsa.curve_brainpoolp192r1, + ecdsa.generator_brainpoolp192r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 3), + "brainpoolP192r1", +) + + +BRAINPOOLP224r1 = Curve( + "BRAINPOOLP224r1", + ecdsa.curve_brainpoolp224r1, + ecdsa.generator_brainpoolp224r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 5), + "brainpoolP224r1", +) + + +BRAINPOOLP256r1 = Curve( + "BRAINPOOLP256r1", + ecdsa.curve_brainpoolp256r1, + ecdsa.generator_brainpoolp256r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 7), + "brainpoolP256r1", +) + + +BRAINPOOLP320r1 = Curve( + "BRAINPOOLP320r1", + ecdsa.curve_brainpoolp320r1, + ecdsa.generator_brainpoolp320r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 9), + "brainpoolP320r1", +) + + +BRAINPOOLP384r1 = Curve( + "BRAINPOOLP384r1", + ecdsa.curve_brainpoolp384r1, + ecdsa.generator_brainpoolp384r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 11), + "brainpoolP384r1", +) + + +BRAINPOOLP512r1 = Curve( + "BRAINPOOLP512r1", + ecdsa.curve_brainpoolp512r1, + ecdsa.generator_brainpoolp512r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 13), + "brainpoolP512r1", +) + + +# no order in particular, but keep previously added curves first +curves = [ + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + SECP256k1, + BRAINPOOLP160r1, + BRAINPOOLP192r1, + BRAINPOOLP224r1, + BRAINPOOLP256r1, + BRAINPOOLP320r1, + BRAINPOOLP384r1, + BRAINPOOLP512r1, + SECP112r1, + SECP112r2, + SECP128r1, + SECP160r1, +] + + +def find_curve(oid_curve): + for c in curves: + if c.oid == oid_curve: + 
return c + raise UnknownCurveError( + "I don't know about the curve with oid %s." + "I only know about these: %s" % (oid_curve, [c.name for c in curves]) + ) diff --git a/venv/lib/python3.10/site-packages/ecdsa/der.py b/venv/lib/python3.10/site-packages/ecdsa/der.py new file mode 100644 index 0000000..459f13c --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/der.py @@ -0,0 +1,409 @@ +from __future__ import division + +import binascii +import base64 +import warnings +from itertools import chain +from six import int2byte, b, text_type +from ._compat import str_idx_as_int + + +class UnexpectedDER(Exception): + pass + + +def encode_constructed(tag, value): + return int2byte(0xA0 + tag) + encode_length(len(value)) + value + + +def encode_integer(r): + assert r >= 0 # can't support negative numbers yet + h = ("%x" % r).encode() + if len(h) % 2: + h = b("0") + h + s = binascii.unhexlify(h) + num = str_idx_as_int(s, 0) + if num <= 0x7F: + return b("\x02") + encode_length(len(s)) + s + else: + # DER integers are two's complement, so if the first byte is + # 0x80-0xff then we need an extra 0x00 byte to prevent it from + # looking negative. + return b("\x02") + encode_length(len(s) + 1) + b("\x00") + s + + +# sentry object to check if an argument was specified (used to detect +# deprecated calling convention) +_sentry = object() + + +def encode_bitstring(s, unused=_sentry): + """ + Encode a binary string as a BIT STRING using :term:`DER` encoding. + + Note, because there is no native Python object that can encode an actual + bit string, this function only accepts byte strings as the `s` argument. + The byte string is the actual bit string that will be encoded, padded + on the right (least significant bits, looking from big endian perspective) + to the first full byte. If the bit string has a bit length that is multiple + of 8, then the padding should not be included. For correct DER encoding + the padding bits MUST be set to 0. + + Number of bits of padding need to be provided as the `unused` parameter. + In case they are specified as None, it means the number of unused bits + is already encoded in the string as the first byte. + + The deprecated call convention specifies just the `s` parameters and + encodes the number of unused bits as first parameter (same convention + as with None). + + Empty string must be encoded with `unused` specified as 0. + + Future version of python-ecdsa will make specifying the `unused` argument + mandatory. 
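        Worked example (illustrative): a 12-bit string packed as
        b"\xff\xf0" leaves 4 unused (zero) bits in its last byte, so the
        DER encoding is tag 0x03, length 3, the unused-bit count, then
        the payload:

            encode_bitstring(b"\xff\xf0", 4) == b"\x03\x03\x04\xff\xf0"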
+ + :param s: bytes to encode + :type s: bytes like object + :param unused: number of bits at the end of `s` that are unused, must be + between 0 and 7 (inclusive) + :type unused: int or None + + :raises ValueError: when `unused` is too large or too small + + :return: `s` encoded using DER + :rtype: bytes + """ + encoded_unused = b"" + len_extra = 0 + if unused is _sentry: + warnings.warn( + "Legacy call convention used, unused= needs to be specified", + DeprecationWarning, + ) + elif unused is not None: + if not 0 <= unused <= 7: + raise ValueError("unused must be integer between 0 and 7") + if unused: + if not s: + raise ValueError("unused is non-zero but s is empty") + last = str_idx_as_int(s, -1) + if last & (2 ** unused - 1): + raise ValueError("unused bits must be zeros in DER") + encoded_unused = int2byte(unused) + len_extra = 1 + return b("\x03") + encode_length(len(s) + len_extra) + encoded_unused + s + + +def encode_octet_string(s): + return b("\x04") + encode_length(len(s)) + s + + +def encode_oid(first, second, *pieces): + assert 0 <= first < 2 and 0 <= second <= 39 or first == 2 and 0 <= second + body = b"".join( + chain( + [encode_number(40 * first + second)], + (encode_number(p) for p in pieces), + ) + ) + return b"\x06" + encode_length(len(body)) + body + + +def encode_sequence(*encoded_pieces): + total_len = sum([len(p) for p in encoded_pieces]) + return b("\x30") + encode_length(total_len) + b("").join(encoded_pieces) + + +def encode_number(n): + b128_digits = [] + while n: + b128_digits.insert(0, (n & 0x7F) | 0x80) + n = n >> 7 + if not b128_digits: + b128_digits.append(0) + b128_digits[-1] &= 0x7F + return b("").join([int2byte(d) for d in b128_digits]) + + +def is_sequence(string): + return string and string[:1] == b"\x30" + + +def remove_constructed(string): + s0 = str_idx_as_int(string, 0) + if (s0 & 0xE0) != 0xA0: + raise UnexpectedDER( + "wanted type 'constructed tag' (0xa0-0xbf), got 0x%02x" % s0 + ) + tag = s0 & 0x1F + length, llen = read_length(string[1:]) + body = string[1 + llen : 1 + llen + length] + rest = string[1 + llen + length :] + return tag, body, rest + + +def remove_sequence(string): + if not string: + raise UnexpectedDER("Empty string does not encode a sequence") + if string[:1] != b"\x30": + n = str_idx_as_int(string, 0) + raise UnexpectedDER("wanted type 'sequence' (0x30), got 0x%02x" % n) + length, lengthlength = read_length(string[1:]) + if length > len(string) - 1 - lengthlength: + raise UnexpectedDER("Length longer than the provided buffer") + endseq = 1 + lengthlength + length + return string[1 + lengthlength : endseq], string[endseq:] + + +def remove_octet_string(string): + if string[:1] != b"\x04": + n = str_idx_as_int(string, 0) + raise UnexpectedDER("wanted type 'octetstring' (0x04), got 0x%02x" % n) + length, llen = read_length(string[1:]) + body = string[1 + llen : 1 + llen + length] + rest = string[1 + llen + length :] + return body, rest + + +def remove_object(string): + if not string: + raise UnexpectedDER( + "Empty string does not encode an object identifier" + ) + if string[:1] != b"\x06": + n = str_idx_as_int(string, 0) + raise UnexpectedDER("wanted type 'object' (0x06), got 0x%02x" % n) + length, lengthlength = read_length(string[1:]) + body = string[1 + lengthlength : 1 + lengthlength + length] + rest = string[1 + lengthlength + length :] + if not body: + raise UnexpectedDER("Empty object identifier") + if len(body) != length: + raise UnexpectedDER( + "Length of object identifier longer than the provided buffer" + ) + numbers = 
[] + while body: + n, ll = read_number(body) + numbers.append(n) + body = body[ll:] + n0 = numbers.pop(0) + if n0 < 80: + first = n0 // 40 + else: + first = 2 + second = n0 - (40 * first) + numbers.insert(0, first) + numbers.insert(1, second) + return tuple(numbers), rest + + +def remove_integer(string): + if not string: + raise UnexpectedDER( + "Empty string is an invalid encoding of an integer" + ) + if string[:1] != b"\x02": + n = str_idx_as_int(string, 0) + raise UnexpectedDER("wanted type 'integer' (0x02), got 0x%02x" % n) + length, llen = read_length(string[1:]) + if length > len(string) - 1 - llen: + raise UnexpectedDER("Length longer than provided buffer") + if length == 0: + raise UnexpectedDER("0-byte long encoding of integer") + numberbytes = string[1 + llen : 1 + llen + length] + rest = string[1 + llen + length :] + msb = str_idx_as_int(numberbytes, 0) + if not msb < 0x80: + raise UnexpectedDER("Negative integers are not supported") + # check if the encoding is the minimal one (DER requirement) + if length > 1 and not msb: + # leading zero byte is allowed if the integer would have been + # considered a negative number otherwise + smsb = str_idx_as_int(numberbytes, 1) + if smsb < 0x80: + raise UnexpectedDER( + "Invalid encoding of integer, unnecessary " + "zero padding bytes" + ) + return int(binascii.hexlify(numberbytes), 16), rest + + +def read_number(string): + number = 0 + llen = 0 + if str_idx_as_int(string, 0) == 0x80: + raise UnexpectedDER("Non minimal encoding of OID subidentifier") + # base-128 big endian, with most significant bit set in all but the last + # byte + while True: + if llen >= len(string): + raise UnexpectedDER("ran out of length bytes") + number = number << 7 + d = str_idx_as_int(string, llen) + number += d & 0x7F + llen += 1 + if not d & 0x80: + break + return number, llen + + +def encode_length(l): + assert l >= 0 + if l < 0x80: + return int2byte(l) + s = ("%x" % l).encode() + if len(s) % 2: + s = b("0") + s + s = binascii.unhexlify(s) + llen = len(s) + return int2byte(0x80 | llen) + s + + +def read_length(string): + if not string: + raise UnexpectedDER("Empty string can't encode valid length value") + num = str_idx_as_int(string, 0) + if not (num & 0x80): + # short form + return (num & 0x7F), 1 + # else long-form: b0&0x7f is number of additional base256 length bytes, + # big-endian + llen = num & 0x7F + if not llen: + raise UnexpectedDER("Invalid length encoding, length of length is 0") + if llen > len(string) - 1: + raise UnexpectedDER("Length of length longer than provided buffer") + # verify that the encoding is minimal possible (DER requirement) + msb = str_idx_as_int(string, 1) + if not msb or llen == 1 and msb < 0x80: + raise UnexpectedDER("Not minimal encoding of length") + return int(binascii.hexlify(string[1 : 1 + llen]), 16), 1 + llen + + +def remove_bitstring(string, expect_unused=_sentry): + """ + Remove a BIT STRING object from `string` following :term:`DER`. + + The `expect_unused` can be used to specify if the bit string should + have the amount of unused bits decoded or not. If it's an integer, any + read BIT STRING that has number of unused bits different from specified + value will cause UnexpectedDER exception to be raised (this is especially + useful when decoding BIT STRINGS that have DER encoded object in them; + DER encoding is byte oriented, so the unused bits will always equal 0). 
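        For example (illustrative), a 3-byte BIT STRING holding the single
        payload byte 0xff with zero unused bits decodes as:

            remove_bitstring(b"\x03\x02\x00\xff", 0) == (b"\xff", b"")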
+ + If the `expect_unused` is specified as None, the first element returned + will be a tuple, with the first value being the extracted bit string + while the second value will be the decoded number of unused bits. + + If the `expect_unused` is unspecified, the decoding of byte with + number of unused bits will not be attempted and the bit string will be + returned as-is, the callee will be required to decode it and verify its + correctness. + + Future version of python will require the `expected_unused` parameter + to be specified. + + :param string: string of bytes to extract the BIT STRING from + :type string: bytes like object + :param expect_unused: number of bits that should be unused in the BIT + STRING, or None, to return it to caller + :type expect_unused: int or None + + :raises UnexpectedDER: when the encoding does not follow DER. + + :return: a tuple with first element being the extracted bit string and + the second being the remaining bytes in the string (if any); if the + `expect_unused` is specified as None, the first element of the returned + tuple will be a tuple itself, with first element being the bit string + as bytes and the second element being the number of unused bits at the + end of the byte array as an integer + :rtype: tuple + """ + if not string: + raise UnexpectedDER("Empty string does not encode a bitstring") + if expect_unused is _sentry: + warnings.warn( + "Legacy call convention used, expect_unused= needs to be" + " specified", + DeprecationWarning, + ) + num = str_idx_as_int(string, 0) + if string[:1] != b"\x03": + raise UnexpectedDER("wanted bitstring (0x03), got 0x%02x" % num) + length, llen = read_length(string[1:]) + if not length: + raise UnexpectedDER("Invalid length of bit string, can't be 0") + body = string[1 + llen : 1 + llen + length] + rest = string[1 + llen + length :] + if expect_unused is not _sentry: + unused = str_idx_as_int(body, 0) + if not 0 <= unused <= 7: + raise UnexpectedDER("Invalid encoding of unused bits") + if expect_unused is not None and expect_unused != unused: + raise UnexpectedDER("Unexpected number of unused bits") + body = body[1:] + if unused: + if not body: + raise UnexpectedDER("Invalid encoding of empty bit string") + last = str_idx_as_int(body, -1) + # verify that all the unused bits are set to zero (DER requirement) + if last & (2 ** unused - 1): + raise UnexpectedDER("Non zero padding bits in bit string") + if expect_unused is None: + body = (body, unused) + return body, rest + + +# SEQUENCE([1, STRING(secexp), cont[0], OBJECT(curvename), cont[1], BINTSTRING) + + +# signatures: (from RFC3279) +# ansi-X9-62 OBJECT IDENTIFIER ::= { +# iso(1) member-body(2) us(840) 10045 } +# +# id-ecSigType OBJECT IDENTIFIER ::= { +# ansi-X9-62 signatures(4) } +# ecdsa-with-SHA1 OBJECT IDENTIFIER ::= { +# id-ecSigType 1 } +# so 1,2,840,10045,4,1 +# so 0x42, .. .. 
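# Illustrative cross-check with the helpers above:
#     encode_oid(1, 2, 840, 10045, 4, 1)
#     == b"\x06\x07\x2a\x86\x48\xce\x3d\x04\x01"
# The first two arcs pack into one byte as 40*1 + 2 = 42 = 0x2a, and
# 840 and 10045 use the base-128 encoding with the high bit marking
# continuation bytes (0x86 0x48 and 0xce 0x3d respectively).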
+ +# Ecdsa-Sig-Value ::= SEQUENCE { +# r INTEGER, +# s INTEGER } + +# id-public-key-type OBJECT IDENTIFIER ::= { ansi-X9.62 2 } +# +# id-ecPublicKey OBJECT IDENTIFIER ::= { id-publicKeyType 1 } + +# I think the secp224r1 identifier is (t=06,l=05,v=2b81040021) +# secp224r1 OBJECT IDENTIFIER ::= { +# iso(1) identified-organization(3) certicom(132) curve(0) 33 } +# and the secp384r1 is (t=06,l=05,v=2b81040022) +# secp384r1 OBJECT IDENTIFIER ::= { +# iso(1) identified-organization(3) certicom(132) curve(0) 34 } + + +def unpem(pem): + if isinstance(pem, text_type): # pragma: no branch + pem = pem.encode() + + d = b("").join( + [ + l.strip() + for l in pem.split(b("\n")) + if l and not l.startswith(b("-----")) + ] + ) + return base64.b64decode(d) + + +def topem(der, name): + b64 = base64.b64encode(der) + lines = [("-----BEGIN %s-----\n" % name).encode()] + lines.extend( + [b64[start : start + 64] + b("\n") for start in range(0, len(b64), 64)] + ) + lines.append(("-----END %s-----\n" % name).encode()) + return b("").join(lines) diff --git a/venv/lib/python3.10/site-packages/ecdsa/ecdh.py b/venv/lib/python3.10/site-packages/ecdsa/ecdh.py new file mode 100644 index 0000000..824a09b --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/ecdh.py @@ -0,0 +1,336 @@ +""" +Class for performing Elliptic-curve Diffie-Hellman (ECDH) operations. +""" + +from .util import number_to_string +from .ellipticcurve import INFINITY +from .keys import SigningKey, VerifyingKey + + +__all__ = [ + "ECDH", + "NoKeyError", + "NoCurveError", + "InvalidCurveError", + "InvalidSharedSecretError", +] + + +class NoKeyError(Exception): + """ECDH. Key not found but it is needed for operation.""" + + pass + + +class NoCurveError(Exception): + """ECDH. Curve not set but it is needed for operation.""" + + pass + + +class InvalidCurveError(Exception): + """ + ECDH. Raised in case the public and private keys use different curves. + """ + + pass + + +class InvalidSharedSecretError(Exception): + """ECDH. Raised in case the shared secret we obtained is an INFINITY.""" + + pass + + +class ECDH(object): + """ + Elliptic-curve Diffie-Hellman (ECDH). A key agreement protocol. + + Allows two parties, each having an elliptic-curve public-private key + pair, to establish a shared secret over an insecure channel + """ + + def __init__(self, curve=None, private_key=None, public_key=None): + """ + ECDH init. + + Call can be initialised without parameters, then the first operation + (loading either key) will set the used curve. + All parameters must be ultimately set before shared secret + calculation will be allowed. + + :param curve: curve for operations + :type curve: Curve + :param private_key: `my` private key for ECDH + :type private_key: SigningKey + :param public_key: `their` public key for ECDH + :type public_key: VerifyingKey + """ + self.curve = curve + self.private_key = None + self.public_key = None + if private_key: + self.load_private_key(private_key) + if public_key: + self.load_received_public_key(public_key) + + def _get_shared_secret(self, remote_public_key): + if not self.private_key: + raise NoKeyError( + "Private key needs to be set to create shared secret" + ) + if not self.public_key: + raise NoKeyError( + "Public key needs to be set to create shared secret" + ) + if not ( + self.private_key.curve == self.curve == remote_public_key.curve + ): + raise InvalidCurveError( + "Curves for public key and private key is not equal." 
+ ) + + # shared secret = PUBKEYtheirs * PRIVATEKEYours + result = ( + remote_public_key.pubkey.point + * self.private_key.privkey.secret_multiplier + ) + if result == INFINITY: + raise InvalidSharedSecretError("Invalid shared secret (INFINITY).") + + return result.x() + + def set_curve(self, key_curve): + """ + Set the working curve for ecdh operations. + + :param key_curve: curve from `curves` module + :type key_curve: Curve + """ + self.curve = key_curve + + def generate_private_key(self): + """ + Generate local private key for ecdh operation with curve that was set. + + :raises NoCurveError: Curve must be set before key generation. + + :return: public (verifying) key from this private key. + :rtype: VerifyingKey object + """ + if not self.curve: + raise NoCurveError("Curve must be set prior to key generation.") + return self.load_private_key(SigningKey.generate(curve=self.curve)) + + def load_private_key(self, private_key): + """ + Load private key from SigningKey (keys.py) object. + + Needs to have the same curve as was set with set_curve method. + If curve is not set - it sets from this SigningKey + + :param private_key: Initialised SigningKey class + :type private_key: SigningKey + + :raises InvalidCurveError: private_key curve not the same as self.curve + + :return: public (verifying) key from this private key. + :rtype: VerifyingKey object + """ + if not self.curve: + self.curve = private_key.curve + if self.curve != private_key.curve: + raise InvalidCurveError("Curve mismatch.") + self.private_key = private_key + return self.private_key.get_verifying_key() + + def load_private_key_bytes(self, private_key): + """ + Load private key from byte string. + + Uses current curve and checks if the provided key matches + the curve of ECDH key agreement. + Key loads via from_string method of SigningKey class + + :param private_key: private key in bytes string format + :type private_key: :term:`bytes-like object` + + :raises NoCurveError: Curve must be set before loading. + + :return: public (verifying) key from this private key. + :rtype: VerifyingKey object + """ + if not self.curve: + raise NoCurveError("Curve must be set prior to key load.") + return self.load_private_key( + SigningKey.from_string(private_key, curve=self.curve) + ) + + def load_private_key_der(self, private_key_der): + """ + Load private key from DER byte string. + + Compares the curve of the DER-encoded key with the ECDH set curve, + uses the former if unset. + + Note, the only DER format supported is the RFC5915 + Look at keys.py:SigningKey.from_der() + + :param private_key_der: string with the DER encoding of private ECDSA + key + :type private_key_der: string + + :raises InvalidCurveError: private_key curve not the same as self.curve + + :return: public (verifying) key from this private key. + :rtype: VerifyingKey object + """ + return self.load_private_key(SigningKey.from_der(private_key_der)) + + def load_private_key_pem(self, private_key_pem): + """ + Load private key from PEM string. + + Compares the curve of the DER-encoded key with the ECDH set curve, + uses the former if unset. + + Note, the only PEM format supported is the RFC5915 + Look at keys.py:SigningKey.from_pem() + it needs to have `EC PRIVATE KEY` section + + :param private_key_pem: string with PEM-encoded private ECDSA key + :type private_key_pem: string + + :raises InvalidCurveError: private_key curve not the same as self.curve + + :return: public (verifying) key from this private key. 
+ :rtype: VerifyingKey object + """ + return self.load_private_key(SigningKey.from_pem(private_key_pem)) + + def get_public_key(self): + """ + Provides a public key that matches the local private key. + + Needs to be sent to the remote party. + + :return: public (verifying) key from local private key. + :rtype: VerifyingKey object + """ + return self.private_key.get_verifying_key() + + def load_received_public_key(self, public_key): + """ + Load public key from VerifyingKey (keys.py) object. + + Needs to have the same curve as set as current for ecdh operation. + If curve is not set - it sets it from VerifyingKey. + + :param public_key: Initialised VerifyingKey class + :type public_key: VerifyingKey + + :raises InvalidCurveError: public_key curve not the same as self.curve + """ + if not self.curve: + self.curve = public_key.curve + if self.curve != public_key.curve: + raise InvalidCurveError("Curve mismatch.") + self.public_key = public_key + + def load_received_public_key_bytes( + self, public_key_str, valid_encodings=None + ): + """ + Load public key from byte string. + + Uses current curve and checks if key length corresponds to + the current curve. + Key loads via from_string method of VerifyingKey class + + :param public_key_str: public key in bytes string format + :type public_key_str: :term:`bytes-like object` + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + :type valid_encodings: :term:`set-like object` + """ + return self.load_received_public_key( + VerifyingKey.from_string( + public_key_str, self.curve, valid_encodings + ) + ) + + def load_received_public_key_der(self, public_key_der): + """ + Load public key from DER byte string. + + Compares the curve of the DER-encoded key with the ECDH set curve, + uses the former if unset. + + Note, the only DER format supported is the RFC5912 + Look at keys.py:VerifyingKey.from_der() + + :param public_key_der: string with the DER encoding of public ECDSA key + :type public_key_der: string + + :raises InvalidCurveError: public_key curve not the same as self.curve + """ + return self.load_received_public_key( + VerifyingKey.from_der(public_key_der) + ) + + def load_received_public_key_pem(self, public_key_pem): + """ + Load public key from PEM string. + + Compares the curve of the PEM-encoded key with the ECDH set curve, + uses the former if unset. + + Note, the only PEM format supported is the RFC5912 + Look at keys.py:VerifyingKey.from_pem() + + :param public_key_pem: string with PEM-encoded public ECDSA key + :type public_key_pem: string + + :raises InvalidCurveError: public_key curve not the same as self.curve + """ + return self.load_received_public_key( + VerifyingKey.from_pem(public_key_pem) + ) + + def generate_sharedsecret_bytes(self): + """ + Generate shared secret from local private key and remote public key. + + The objects needs to have both private key and received public key + before generation is allowed. + + :raises InvalidCurveError: public_key curve not the same as self.curve + :raises NoKeyError: public_key or private_key is not set + + :return: shared secret + :rtype: byte string + """ + return number_to_string( + self.generate_sharedsecret(), self.private_key.curve.curve.p() + ) + + def generate_sharedsecret(self): + """ + Generate shared secret from local private key and remote public key. 
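        A minimal two-party sketch (illustrative, assuming the package's
        top-level exports; both sides derive the same integer):

            from ecdsa import ECDH, NIST256p

            alice = ECDH(curve=NIST256p)
            alice.generate_private_key()
            bob = ECDH(curve=NIST256p)
            bob.generate_private_key()

            alice.load_received_public_key(bob.get_public_key())
            bob.load_received_public_key(alice.get_public_key())

            assert alice.generate_sharedsecret() == bob.generate_sharedsecret()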
+ + The objects needs to have both private key and received public key + before generation is allowed. + + It's the same for local and remote party. + shared secret(local private key, remote public key ) == + shared secret (local public key, remote private key) + + :raises InvalidCurveError: public_key curve not the same as self.curve + :raises NoKeyError: public_key or private_key is not set + + :return: shared secret + :rtype: int + """ + return self._get_shared_secret(self.public_key) diff --git a/venv/lib/python3.10/site-packages/ecdsa/ecdsa.py b/venv/lib/python3.10/site-packages/ecdsa/ecdsa.py new file mode 100644 index 0000000..1e24c8f --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/ecdsa.py @@ -0,0 +1,835 @@ +#! /usr/bin/env python + +""" +Implementation of Elliptic-Curve Digital Signatures. + +Classes and methods for elliptic-curve signatures: +private keys, public keys, signatures, +NIST prime-modulus curves with modulus lengths of +192, 224, 256, 384, and 521 bits. + +Example: + + # (In real-life applications, you would probably want to + # protect against defects in SystemRandom.) + from random import SystemRandom + randrange = SystemRandom().randrange + + # Generate a public/private key pair using the NIST Curve P-192: + + g = generator_192 + n = g.order() + secret = randrange( 1, n ) + pubkey = Public_key( g, g * secret ) + privkey = Private_key( pubkey, secret ) + + # Signing a hash value: + + hash = randrange( 1, n ) + signature = privkey.sign( hash, randrange( 1, n ) ) + + # Verifying a signature for a hash value: + + if pubkey.verifies( hash, signature ): + print_("Demo verification succeeded.") + else: + print_("*** Demo verification failed.") + + # Verification fails if the hash value is modified: + + if pubkey.verifies( hash-1, signature ): + print_("**** Demo verification failed to reject tampered hash.") + else: + print_("Demo verification correctly rejected tampered hash.") + +Version of 2009.05.16. + +Revision history: + 2005.12.31 - Initial version. + 2008.11.25 - Substantial revisions introducing new classes. + 2009.05.16 - Warn against using random.randrange in real applications. + 2009.05.17 - Use random.SystemRandom by default. + +Written in 2005 by Peter Pearson and placed in the public domain. +""" + +from six import int2byte, b +from . import ellipticcurve +from . 
import numbertheory +from .util import bit_length +from ._compat import remove_whitespace + + +class RSZeroError(RuntimeError): + pass + + +class InvalidPointError(RuntimeError): + pass + + +class Signature(object): + """ECDSA signature.""" + + def __init__(self, r, s): + self.r = r + self.s = s + + def recover_public_keys(self, hash, generator): + """Returns two public keys for which the signature is valid + hash is signed hash + generator is the used generator of the signature + """ + curve = generator.curve() + n = generator.order() + r = self.r + s = self.s + e = hash + x = r + + # Compute the curve point with x as x-coordinate + alpha = ( + pow(x, 3, curve.p()) + (curve.a() * x) + curve.b() + ) % curve.p() + beta = numbertheory.square_root_mod_prime(alpha, curve.p()) + y = beta if beta % 2 == 0 else curve.p() - beta + + # Compute the public key + R1 = ellipticcurve.PointJacobi(curve, x, y, 1, n) + Q1 = numbertheory.inverse_mod(r, n) * (s * R1 + (-e % n) * generator) + Pk1 = Public_key(generator, Q1) + + # And the second solution + R2 = ellipticcurve.PointJacobi(curve, x, -y, 1, n) + Q2 = numbertheory.inverse_mod(r, n) * (s * R2 + (-e % n) * generator) + Pk2 = Public_key(generator, Q2) + + return [Pk1, Pk2] + + +class Public_key(object): + """Public key for ECDSA.""" + + def __init__(self, generator, point, verify=True): + """Low level ECDSA public key object. + + :param generator: the Point that generates the group (the base point) + :param point: the Point that defines the public key + :param bool verify: if True check if point is valid point on curve + + :raises InvalidPointError: if the point parameters are invalid or + point does not lay on the curve + """ + + self.curve = generator.curve() + self.generator = generator + self.point = point + n = generator.order() + p = self.curve.p() + if not (0 <= point.x() < p) or not (0 <= point.y() < p): + raise InvalidPointError( + "The public point has x or y out of range." + ) + if verify and not self.curve.contains_point(point.x(), point.y()): + raise InvalidPointError("Point does not lay on the curve") + if not n: + raise InvalidPointError("Generator point must have order.") + # for curve parameters with base point with cofactor 1, all points + # that are on the curve are scalar multiples of the base point, so + # verifying that is not necessary. See Section 3.2.2.1 of SEC 1 v2 + if ( + verify + and self.curve.cofactor() != 1 + and not n * point == ellipticcurve.INFINITY + ): + raise InvalidPointError("Generator point order is bad.") + + def __eq__(self, other): + """Return True if the keys are identical, False otherwise. + + Note: for comparison, only placement on the same curve and point + equality is considered, use of the same generator point is not + considered. + """ + if isinstance(other, Public_key): + return self.curve == other.curve and self.point == other.point + return NotImplemented + + def __ne__(self, other): + """Return False if the keys are identical, True otherwise.""" + return not self == other + + def verifies(self, hash, signature): + """Verify that signature is a valid signature of hash. + Return True if the signature is valid. + """ + + # From X9.62 J.3.1. 
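        # In other words, for message hash e and signature (r, s) the code
        # below computes u1 = e*s^-1 mod n and u2 = r*s^-1 mod n, forms
        # R = u1*G + u2*Q for public point Q, and accepts iff
        # R.x mod n == r; mul_add(), when the point type provides it,
        # evaluates that double multiplication in a single pass.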
+ + G = self.generator + n = G.order() + r = signature.r + s = signature.s + if r < 1 or r > n - 1: + return False + if s < 1 or s > n - 1: + return False + c = numbertheory.inverse_mod(s, n) + u1 = (hash * c) % n + u2 = (r * c) % n + if hasattr(G, "mul_add"): + xy = G.mul_add(u1, self.point, u2) + else: + xy = u1 * G + u2 * self.point + v = xy.x() % n + return v == r + + +class Private_key(object): + """Private key for ECDSA.""" + + def __init__(self, public_key, secret_multiplier): + """public_key is of class Public_key; + secret_multiplier is a large integer. + """ + + self.public_key = public_key + self.secret_multiplier = secret_multiplier + + def __eq__(self, other): + """Return True if the points are identical, False otherwise.""" + if isinstance(other, Private_key): + return ( + self.public_key == other.public_key + and self.secret_multiplier == other.secret_multiplier + ) + return NotImplemented + + def __ne__(self, other): + """Return False if the points are identical, True otherwise.""" + return not self == other + + def sign(self, hash, random_k): + """Return a signature for the provided hash, using the provided + random nonce. It is absolutely vital that random_k be an unpredictable + number in the range [1, self.public_key.point.order()-1]. If + an attacker can guess random_k, he can compute our private key from a + single signature. Also, if an attacker knows a few high-order + bits (or a few low-order bits) of random_k, he can compute our private + key from many signatures. The generation of nonces with adequate + cryptographic strength is very difficult and far beyond the scope + of this comment. + + May raise RuntimeError, in which case retrying with a new + random value k is in order. + """ + + G = self.public_key.generator + n = G.order() + k = random_k % n + # Fix the bit-length of the random nonce, + # so that it doesn't leak via timing. + # This does not change that ks = k mod n + ks = k + n + kt = ks + n + if bit_length(ks) == bit_length(n): + p1 = kt * G + else: + p1 = ks * G + r = p1.x() % n + if r == 0: + raise RSZeroError("amazingly unlucky random number r") + s = ( + numbertheory.inverse_mod(k, n) + * (hash + (self.secret_multiplier * r) % n) + ) % n + if s == 0: + raise RSZeroError("amazingly unlucky random number s") + return Signature(r, s) + + +def int_to_string(x): + """Convert integer x into a string of bytes, as per X9.62.""" + assert x >= 0 + if x == 0: + return b("\0") + result = [] + while x: + ordinal = x & 0xFF + result.append(int2byte(ordinal)) + x >>= 8 + + result.reverse() + return b("").join(result) + + +def string_to_int(s): + """Convert a string of bytes into an integer, as per X9.62.""" + result = 0 + for c in s: + if not isinstance(c, int): + c = ord(c) + result = 256 * result + c + return result + + +def digest_integer(m): + """Convert an integer into a string of bytes, compute + its SHA-1 hash, and convert the result to an integer.""" + # + # I don't expect this function to be used much. I wrote + # it in order to be able to duplicate the examples + # in ECDSAVS. + # + from hashlib import sha1 + + return string_to_int(sha1(int_to_string(m)).digest()) + + +def point_is_valid(generator, x, y): + """Is (x,y) a valid public key based on the specified generator?""" + + # These are the tests specified in X9.62. 
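    # Namely: both coordinates are in [0, p-1], the point satisfies the
    # curve equation, and, when the cofactor is not 1, n*(x, y) is the
    # point at infinity (so the point lies in the prime-order subgroup).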
+ + n = generator.order() + curve = generator.curve() + p = curve.p() + if not (0 <= x < p) or not (0 <= y < p): + return False + if not curve.contains_point(x, y): + return False + if ( + curve.cofactor() != 1 + and not n * ellipticcurve.PointJacobi(curve, x, y, 1) + == ellipticcurve.INFINITY + ): + return False + return True + + +# secp112r1 curve +_p = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD208B"), 16) +# s = 00F50B02 8E4D696E 67687561 51752904 72783FB1 +_a = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD2088"), 16) +_b = int(remove_whitespace("659E F8BA0439 16EEDE89 11702B22"), 16) +_Gx = int(remove_whitespace("09487239 995A5EE7 6B55F9C2 F098"), 16) +_Gy = int(remove_whitespace("A89C E5AF8724 C0A23E0E 0FF77500"), 16) +_r = int(remove_whitespace("DB7C 2ABF62E3 5E7628DF AC6561C5"), 16) +_h = 1 +curve_112r1 = ellipticcurve.CurveFp(_p, _a, _b, _h) +generator_112r1 = ellipticcurve.PointJacobi( + curve_112r1, _Gx, _Gy, 1, _r, generator=True +) + + +# secp112r2 curve +_p = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD208B"), 16) +# s = 022757A1 114D69E 67687561 51755316 C05E0BD4 +_a = int(remove_whitespace("6127 C24C05F3 8A0AAAF6 5C0EF02C"), 16) +_b = int(remove_whitespace("51DE F1815DB5 ED74FCC3 4C85D709"), 16) +_Gx = int(remove_whitespace("4BA30AB5 E892B4E1 649DD092 8643"), 16) +_Gy = int(remove_whitespace("ADCD 46F5882E 3747DEF3 6E956E97"), 16) +_r = int(remove_whitespace("36DF 0AAFD8B8 D7597CA1 0520D04B"), 16) +_h = 4 +curve_112r2 = ellipticcurve.CurveFp(_p, _a, _b, _h) +generator_112r2 = ellipticcurve.PointJacobi( + curve_112r2, _Gx, _Gy, 1, _r, generator=True +) + + +# secp128r1 curve +_p = int(remove_whitespace("FFFFFFFD FFFFFFFF FFFFFFFF FFFFFFFF"), 16) +# S = 000E0D4D 69E6768 75615175 0CC03A44 73D03679 +# a and b are mod p, so a is equal to p-3, or simply -3 +# _a = -3 +_b = int(remove_whitespace("E87579C1 1079F43D D824993C 2CEE5ED3"), 16) +_Gx = int(remove_whitespace("161FF752 8B899B2D 0C28607C A52C5B86"), 16) +_Gy = int(remove_whitespace("CF5AC839 5BAFEB13 C02DA292 DDED7A83"), 16) +_r = int(remove_whitespace("FFFFFFFE 00000000 75A30D1B 9038A115"), 16) +_h = 1 +curve_128r1 = ellipticcurve.CurveFp(_p, -3, _b, _h) +generator_128r1 = ellipticcurve.PointJacobi( + curve_128r1, _Gx, _Gy, 1, _r, generator=True +) + + +# secp160r1 +_p = int(remove_whitespace("FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF 7FFFFFFF"), 16) +# S = 1053CDE4 2C14D696 E6768756 1517533B F3F83345 +# a and b are mod p, so a is equal to p-3, or simply -3 +# _a = -3 +_b = int(remove_whitespace("1C97BEFC 54BD7A8B 65ACF89F 81D4D4AD C565FA45"), 16) +_Gx = int( + remove_whitespace("4A96B568 8EF57328 46646989 68C38BB9 13CBFC82"), 16, +) +_Gy = int( + remove_whitespace("23A62855 3168947D 59DCC912 04235137 7AC5FB32"), 16, +) +_r = int( + remove_whitespace("01 00000000 00000000 0001F4C8 F927AED3 CA752257"), 16, +) +_h = 1 +curve_160r1 = ellipticcurve.CurveFp(_p, -3, _b, _h) +generator_160r1 = ellipticcurve.PointJacobi( + curve_160r1, _Gx, _Gy, 1, _r, generator=True +) + + +# NIST Curve P-192: +_p = 6277101735386680763835789423207666416083908700390324961279 +_r = 6277101735386680763835789423176059013767194773182842284081 +# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L +# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65L +_b = int( + remove_whitespace( + """ + 64210519 E59C80E7 0FA7E9AB 72243049 FEB8DEEC C146B9B1""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + 188DA80E B03090F6 7CBF20EB 43A18800 F4FF0AFD 82FF1012""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 07192B95 FFC8DA78 631011ED 
6B24CDD5 73F977A1 1E794811""" + ), + 16, +) + +curve_192 = ellipticcurve.CurveFp(_p, -3, _b, 1) +generator_192 = ellipticcurve.PointJacobi( + curve_192, _Gx, _Gy, 1, _r, generator=True +) + + +# NIST Curve P-224: +_p = int( + remove_whitespace( + """ + 2695994666715063979466701508701963067355791626002630814351 + 0066298881""" + ) +) +_r = int( + remove_whitespace( + """ + 2695994666715063979466701508701962594045780771442439172168 + 2722368061""" + ) +) +# s = 0xbd71344799d5c7fcdc45b59fa3b9ab8f6a948bc5L +# c = 0x5b056c7e11dd68f40469ee7f3c7a7d74f7d121116506d031218291fbL +_b = int( + remove_whitespace( + """ + B4050A85 0C04B3AB F5413256 5044B0B7 D7BFD8BA 270B3943 + 2355FFB4""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + B70E0CBD 6BB4BF7F 321390B9 4A03C1D3 56C21122 343280D6 + 115C1D21""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + BD376388 B5F723FB 4C22DFE6 CD4375A0 5A074764 44D58199 + 85007E34""" + ), + 16, +) + +curve_224 = ellipticcurve.CurveFp(_p, -3, _b, 1) +generator_224 = ellipticcurve.PointJacobi( + curve_224, _Gx, _Gy, 1, _r, generator=True +) + +# NIST Curve P-256: +_p = int( + remove_whitespace( + """ + 1157920892103562487626974469494075735300861434152903141955 + 33631308867097853951""" + ) +) +_r = int( + remove_whitespace( + """ + 115792089210356248762697446949407573529996955224135760342 + 422259061068512044369""" + ) +) +# s = 0xc49d360886e704936a6678e1139d26b7819f7e90L +# c = 0x7efba1662985be9403cb055c75d4f7e0ce8d84a9c5114abcaf3177680104fa0dL +_b = int( + remove_whitespace( + """ + 5AC635D8 AA3A93E7 B3EBBD55 769886BC 651D06B0 CC53B0F6 + 3BCE3C3E 27D2604B""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + 6B17D1F2 E12C4247 F8BCE6E5 63A440F2 77037D81 2DEB33A0 + F4A13945 D898C296""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 4FE342E2 FE1A7F9B 8EE7EB4A 7C0F9E16 2BCE3357 6B315ECE + CBB64068 37BF51F5""" + ), + 16, +) + +curve_256 = ellipticcurve.CurveFp(_p, -3, _b, 1) +generator_256 = ellipticcurve.PointJacobi( + curve_256, _Gx, _Gy, 1, _r, generator=True +) + +# NIST Curve P-384: +_p = int( + remove_whitespace( + """ + 3940200619639447921227904010014361380507973927046544666794 + 8293404245721771496870329047266088258938001861606973112319""" + ) +) +_r = int( + remove_whitespace( + """ + 3940200619639447921227904010014361380507973927046544666794 + 6905279627659399113263569398956308152294913554433653942643""" + ) +) +# s = 0xa335926aa319a27a1d00896a6773a4827acdac73L +# c = int(remove_whitespace( +# """ +# 79d1e655 f868f02f ff48dcde e14151dd b80643c1 406d0ca1 +# 0dfe6fc5 2009540a 495e8042 ea5f744f 6e184667 cc722483""" +# ), 16) +_b = int( + remove_whitespace( + """ + B3312FA7 E23EE7E4 988E056B E3F82D19 181D9C6E FE814112 + 0314088F 5013875A C656398D 8A2ED19D 2A85C8ED D3EC2AEF""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + AA87CA22 BE8B0537 8EB1C71E F320AD74 6E1D3B62 8BA79B98 + 59F741E0 82542A38 5502F25D BF55296C 3A545E38 72760AB7""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 3617DE4A 96262C6F 5D9E98BF 9292DC29 F8F41DBD 289A147C + E9DA3113 B5F0B8C0 0A60B1CE 1D7E819D 7A431D7C 90EA0E5F""" + ), + 16, +) + +curve_384 = ellipticcurve.CurveFp(_p, -3, _b, 1) +generator_384 = ellipticcurve.PointJacobi( + curve_384, _Gx, _Gy, 1, _r, generator=True +) + +# NIST Curve P-521: +_p = int( + "686479766013060971498190079908139321726943530014330540939" + "446345918554318339765605212255964066145455497729631139148" + "0858037121987999716643812574028291115057151" +) +_r = int( + "686479766013060971498190079908139321726943530014330540939" + 
"446345918554318339765539424505774633321719753296399637136" + "3321113864768612440380340372808892707005449" +) +# s = 0xd09e8800291cb85396cc6717393284aaa0da64baL +# c = int(remove_whitespace( +# """ +# 0b4 8bfa5f42 0a349495 39d2bdfc 264eeeeb 077688e4 +# 4fbf0ad8 f6d0edb3 7bd6b533 28100051 8e19f1b9 ffbe0fe9 +# ed8a3c22 00b8f875 e523868c 70c1e5bf 55bad637""" +# ), 16) +_b = int( + remove_whitespace( + """ + 051 953EB961 8E1C9A1F 929A21A0 B68540EE A2DA725B + 99B315F3 B8B48991 8EF109E1 56193951 EC7E937B 1652C0BD + 3BB1BF07 3573DF88 3D2C34F1 EF451FD4 6B503F00""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + C6 858E06B7 0404E9CD 9E3ECB66 2395B442 9C648139 + 053FB521 F828AF60 6B4D3DBA A14B5E77 EFE75928 FE1DC127 + A2FFA8DE 3348B3C1 856A429B F97E7E31 C2E5BD66""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 118 39296A78 9A3BC004 5C8A5FB4 2C7D1BD9 98F54449 + 579B4468 17AFBD17 273E662C 97EE7299 5EF42640 C550B901 + 3FAD0761 353C7086 A272C240 88BE9476 9FD16650""" + ), + 16, +) + +curve_521 = ellipticcurve.CurveFp(_p, -3, _b, 1) +generator_521 = ellipticcurve.PointJacobi( + curve_521, _Gx, _Gy, 1, _r, generator=True +) + +# Certicom secp256-k1 +_a = 0x0000000000000000000000000000000000000000000000000000000000000000 +_b = 0x0000000000000000000000000000000000000000000000000000000000000007 +_p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F +_Gx = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798 +_Gy = 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8 +_r = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 + +curve_secp256k1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_secp256k1 = ellipticcurve.PointJacobi( + curve_secp256k1, _Gx, _Gy, 1, _r, generator=True +) + +# Brainpool P-160-r1 +_a = 0x340E7BE2A280EB74E2BE61BADA745D97E8F7C300 +_b = 0x1E589A8595423412134FAA2DBDEC95C8D8675E58 +_p = 0xE95E4A5F737059DC60DFC7AD95B3D8139515620F +_Gx = 0xBED5AF16EA3F6A4F62938C4631EB5AF7BDBCDBC3 +_Gy = 0x1667CB477A1A8EC338F94741669C976316DA6321 +_q = 0xE95E4A5F737059DC60DF5991D45029409E60FC09 + +curve_brainpoolp160r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp160r1 = ellipticcurve.PointJacobi( + curve_brainpoolp160r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-192-r1 +_a = 0x6A91174076B1E0E19C39C031FE8685C1CAE040E5C69A28EF +_b = 0x469A28EF7C28CCA3DC721D044F4496BCCA7EF4146FBF25C9 +_p = 0xC302F41D932A36CDA7A3463093D18DB78FCE476DE1A86297 +_Gx = 0xC0A0647EAAB6A48753B033C56CB0F0900A2F5C4853375FD6 +_Gy = 0x14B690866ABD5BB88B5F4828C1490002E6773FA2FA299B8F +_q = 0xC302F41D932A36CDA7A3462F9E9E916B5BE8F1029AC4ACC1 + +curve_brainpoolp192r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp192r1 = ellipticcurve.PointJacobi( + curve_brainpoolp192r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-224-r1 +_a = 0x68A5E62CA9CE6C1C299803A6C1530B514E182AD8B0042A59CAD29F43 +_b = 0x2580F63CCFE44138870713B1A92369E33E2135D266DBB372386C400B +_p = 0xD7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FF +_Gx = 0x0D9029AD2C7E5CF4340823B2A87DC68C9E4CE3174C1E6EFDEE12C07D +_Gy = 0x58AA56F772C0726F24C6B89E4ECDAC24354B9E99CAA3F6D3761402CD +_q = 0xD7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F + +curve_brainpoolp224r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp224r1 = ellipticcurve.PointJacobi( + curve_brainpoolp224r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-256-r1 +_a = 0x7D5A0975FC2C3057EEF67530417AFFE7FB8055C126DC5C6CE94A4B44F330B5D9 +_b = 
0x26DC5C6CE94A4B44F330B5D9BBD77CBF958416295CF7E1CE6BCCDC18FF8C07B6 +_p = 0xA9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5377 +_Gx = 0x8BD2AEB9CB7E57CB2C4B482FFC81B7AFB9DE27E1E3BD23C23A4453BD9ACE3262 +_Gy = 0x547EF835C3DAC4FD97F8461A14611DC9C27745132DED8E545C1D54C72F046997 +_q = 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 + +curve_brainpoolp256r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp256r1 = ellipticcurve.PointJacobi( + curve_brainpoolp256r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-320-r1 +_a = int( + remove_whitespace( + """ + 3EE30B568FBAB0F883CCEBD46D3F3BB8A2A73513F5EB79DA66190EB085FFA9 + F492F375A97D860EB4""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + 520883949DFDBC42D3AD198640688A6FE13F41349554B49ACC31DCCD884539 + 816F5EB4AC8FB1F1A6""" + ), + 16, +) +_p = int( + remove_whitespace( + """ + D35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC + 28FCD412B1F1B32E27""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + 43BD7E9AFB53D8B85289BCC48EE5BFE6F20137D10A087EB6E7871E2A10A599 + C710AF8D0D39E20611""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 14FDD05545EC1CC8AB4093247F77275E0743FFED117182EAA9C77877AAAC6A + C7D35245D1692E8EE1""" + ), + 16, +) +_q = int( + remove_whitespace( + """ + D35E472036BC4FB7E13C785ED201E065F98FCFA5B68F12A32D482EC7EE8658 + E98691555B44C59311""" + ), + 16, +) + +curve_brainpoolp320r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp320r1 = ellipticcurve.PointJacobi( + curve_brainpoolp320r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-384-r1 +_a = int( + remove_whitespace( + """ + 7BC382C63D8C150C3C72080ACE05AFA0C2BEA28E4FB22787139165EFBA91F9 + 0F8AA5814A503AD4EB04A8C7DD22CE2826""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + 04A8C7DD22CE28268B39B55416F0447C2FB77DE107DCD2A62E880EA53EEB62 + D57CB4390295DBC9943AB78696FA504C11""" + ), + 16, +) +_p = int( + remove_whitespace( + """ + 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB711 + 23ACD3A729901D1A71874700133107EC53""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + 1D1C64F068CF45FFA2A63A81B7C13F6B8847A3E77EF14FE3DB7FCAFE0CBD10 + E8E826E03436D646AAEF87B2E247D4AF1E""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 8ABE1D7520F9C2A45CB1EB8E95CFD55262B70B29FEEC5864E19C054FF991292 + 80E4646217791811142820341263C5315""" + ), + 16, +) +_q = int( + remove_whitespace( + """ + 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425 + A7CF3AB6AF6B7FC3103B883202E9046565""" + ), + 16, +) + +curve_brainpoolp384r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp384r1 = ellipticcurve.PointJacobi( + curve_brainpoolp384r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-512-r1 +_a = int( + remove_whitespace( + """ + 7830A3318B603B89E2327145AC234CC594CBDD8D3DF91610A83441CAEA9863 + BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117A72BF2C7B9E7C1AC4D77FC94CA""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + 3DF91610A83441CAEA9863BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117 + A72BF2C7B9E7C1AC4D77FC94CADC083E67984050B75EBAE5DD2809BD638016F723""" + ), + 16, +) +_p = int( + remove_whitespace( + """ + AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 + 717D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F3""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + 81AEE4BDD82ED9645A21322E9C4C6A9385ED9F70B5D916C1B43B62EEF4D009 + 8EFF3B1F78E2D0D48D50D1687B93B97D5F7C6D5047406A5E688B352209BCB9F822""" + ), + 16, +) +_Gy = int( + remove_whitespace( 
+ """ + 7DDE385D566332ECC0EABFA9CF7822FDF209F70024A57B1AA000C55B881F81 + 11B2DCDE494A5F485E5BCA4BD88A2763AED1CA2B2FA8F0540678CD1E0F3AD80892""" + ), + 16, +) +_q = int( + remove_whitespace( + """ + AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 + 70553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069""" + ), + 16, +) + +curve_brainpoolp512r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp512r1 = ellipticcurve.PointJacobi( + curve_brainpoolp512r1, _Gx, _Gy, 1, _q, generator=True +) diff --git a/venv/lib/python3.10/site-packages/ecdsa/ellipticcurve.py b/venv/lib/python3.10/site-packages/ecdsa/ellipticcurve.py new file mode 100644 index 0000000..6d0bc30 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/ellipticcurve.py @@ -0,0 +1,1128 @@ +#! /usr/bin/env python +# -*- coding: utf-8 -*- +# +# Implementation of elliptic curves, for cryptographic applications. +# +# This module doesn't provide any way to choose a random elliptic +# curve, nor to verify that an elliptic curve was chosen randomly, +# because one can simply use NIST's standard curves. +# +# Notes from X9.62-1998 (draft): +# Nomenclature: +# - Q is a public key. +# The "Elliptic Curve Domain Parameters" include: +# - q is the "field size", which in our case equals p. +# - p is a big prime. +# - G is a point of prime order (5.1.1.1). +# - n is the order of G (5.1.1.1). +# Public-key validation (5.2.2): +# - Verify that Q is not the point at infinity. +# - Verify that X_Q and Y_Q are in [0,p-1]. +# - Verify that Q is on the curve. +# - Verify that nQ is the point at infinity. +# Signature generation (5.3): +# - Pick random k from [1,n-1]. +# Signature checking (5.4.2): +# - Verify that r and s are in [1,n-1]. +# +# Revision history: +# 2005.12.31 - Initial version. +# 2008.11.25 - Change CurveFp.is_on to contains_point. +# +# Written in 2005 by Peter Pearson and placed in the public domain. +# Modified extensively as part of python-ecdsa. + +from __future__ import division + +try: + from gmpy2 import mpz + + GMPY = True +except ImportError: # pragma: no branch + try: + from gmpy import mpz + + GMPY = True + except ImportError: + GMPY = False + + +from six import python_2_unicode_compatible +from . import numbertheory +from ._compat import normalise_bytes +from .errors import MalformedPointError +from .util import orderlen, string_to_number, number_to_string + + +@python_2_unicode_compatible +class CurveFp(object): + """Elliptic Curve over the field of integers modulo a prime.""" + + if GMPY: # pragma: no branch + + def __init__(self, p, a, b, h=None): + """ + The curve of points satisfying y^2 = x^3 + a*x + b (mod p). + + h is an integer that is the cofactor of the elliptic curve domain + parameters; it is the number of points satisfying the elliptic + curve equation divided by the order of the base point. It is used + for selection of efficient algorithm for public point verification. + """ + self.__p = mpz(p) + self.__a = mpz(a) + self.__b = mpz(b) + # h is not used in calculations and it can be None, so don't use + # gmpy with it + self.__h = h + + else: # pragma: no branch + + def __init__(self, p, a, b, h=None): + """ + The curve of points satisfying y^2 = x^3 + a*x + b (mod p). + + h is an integer that is the cofactor of the elliptic curve domain + parameters; it is the number of points satisfying the elliptic + curve equation divided by the order of the base point. It is used + for selection of efficient algorithm for public point verification. 
+ """ + self.__p = p + self.__a = a + self.__b = b + self.__h = h + + def __eq__(self, other): + """Return True if other is an identical curve, False otherwise. + + Note: the value of the cofactor of the curve is not taken into account + when comparing curves, as it's derived from the base point and + intrinsic curve characteristic (but it's complex to compute), + only the prime and curve parameters are considered. + """ + if isinstance(other, CurveFp): + p = self.__p + return ( + self.__p == other.__p + and self.__a % p == other.__a % p + and self.__b % p == other.__b % p + ) + return NotImplemented + + def __ne__(self, other): + """Return False if other is an identical curve, True otherwise.""" + return not self == other + + def __hash__(self): + return hash((self.__p, self.__a, self.__b)) + + def p(self): + return self.__p + + def a(self): + return self.__a + + def b(self): + return self.__b + + def cofactor(self): + return self.__h + + def contains_point(self, x, y): + """Is the point (x,y) on this curve?""" + return (y * y - ((x * x + self.__a) * x + self.__b)) % self.__p == 0 + + def __str__(self): + return "CurveFp(p=%d, a=%d, b=%d, h=%d)" % ( + self.__p, + self.__a, + self.__b, + self.__h, + ) + + +class AbstractPoint(object): + """Class for common methods of elliptic curve points.""" + + @staticmethod + def _from_raw_encoding(data, raw_encoding_length): + """ + Decode public point from :term:`raw encoding`. + + :term:`raw encoding` is the same as the :term:`uncompressed` encoding, + but without the 0x04 byte at the beginning. + """ + # real assert, from_bytes() should not call us with different length + assert len(data) == raw_encoding_length + xs = data[: raw_encoding_length // 2] + ys = data[raw_encoding_length // 2 :] + # real assert, raw_encoding_length is calculated by multiplying an + # integer by two so it will always be even + assert len(xs) == raw_encoding_length // 2 + assert len(ys) == raw_encoding_length // 2 + coord_x = string_to_number(xs) + coord_y = string_to_number(ys) + + return coord_x, coord_y + + @staticmethod + def _from_compressed(data, curve): + """Decode public point from compressed encoding.""" + if data[:1] not in (b"\x02", b"\x03"): + raise MalformedPointError("Malformed compressed point encoding") + + is_even = data[:1] == b"\x02" + x = string_to_number(data[1:]) + p = curve.p() + alpha = (pow(x, 3, p) + (curve.a() * x) + curve.b()) % p + try: + beta = numbertheory.square_root_mod_prime(alpha, p) + except numbertheory.SquareRootError as e: + raise MalformedPointError( + "Encoding does not correspond to a point on curve", e + ) + if is_even == bool(beta & 1): + y = p - beta + else: + y = beta + return x, y + + @classmethod + def _from_hybrid(cls, data, raw_encoding_length, validate_encoding): + """Decode public point from hybrid encoding.""" + # real assert, from_bytes() should not call us with different types + assert data[:1] in (b"\x06", b"\x07") + + # primarily use the uncompressed as it's easiest to handle + x, y = cls._from_raw_encoding(data[1:], raw_encoding_length) + + # but validate if it's self-consistent if we're asked to do that + if validate_encoding and ( + y & 1 + and data[:1] != b"\x07" + or (not y & 1) + and data[:1] != b"\x06" + ): + raise MalformedPointError("Inconsistent hybrid point encoding") + + return x, y + + @classmethod + def from_bytes( + cls, curve, data, validate_encoding=True, valid_encodings=None + ): + """ + Initialise the object from byte encoding of a point. 
+ + The method does accept and automatically detect the type of point + encoding used. It supports the :term:`raw encoding`, + :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. + + Note: generally you will want to call the ``from_bytes()`` method of + either a child class, PointJacobi or Point. + + :param data: single point encoding of the public key + :type data: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ecdsa.ellipticcurve.CurveFp + :param validate_encoding: whether to verify that the encoding of the + point is self-consistent, defaults to True, has effect only + on ``hybrid`` encoding + :type validate_encoding: bool + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + :type valid_encodings: :term:`set-like object` + + :raises MalformedPointError: if the public point does not lay on the + curve or the encoding is invalid + + :return: x and y coordinates of the encoded point + :rtype: tuple(int, int) + """ + if not valid_encodings: + valid_encodings = set( + ["uncompressed", "compressed", "hybrid", "raw"] + ) + if not all( + i in set(("uncompressed", "compressed", "hybrid", "raw")) + for i in valid_encodings + ): + raise ValueError( + "Only uncompressed, compressed, hybrid or raw encoding " + "supported." + ) + data = normalise_bytes(data) + key_len = len(data) + raw_encoding_length = 2 * orderlen(curve.p()) + if key_len == raw_encoding_length and "raw" in valid_encodings: + coord_x, coord_y = cls._from_raw_encoding( + data, raw_encoding_length + ) + elif key_len == raw_encoding_length + 1 and ( + "hybrid" in valid_encodings or "uncompressed" in valid_encodings + ): + if data[:1] in (b"\x06", b"\x07") and "hybrid" in valid_encodings: + coord_x, coord_y = cls._from_hybrid( + data, raw_encoding_length, validate_encoding + ) + elif data[:1] == b"\x04" and "uncompressed" in valid_encodings: + coord_x, coord_y = cls._from_raw_encoding( + data[1:], raw_encoding_length + ) + else: + raise MalformedPointError( + "Invalid X9.62 encoding of the public point" + ) + elif ( + key_len == raw_encoding_length // 2 + 1 + and "compressed" in valid_encodings + ): + coord_x, coord_y = cls._from_compressed(data, curve) + else: + raise MalformedPointError( + "Length of string does not match lengths of " + "any of the enabled ({0}) encodings of the " + "curve.".format(", ".join(valid_encodings)) + ) + return coord_x, coord_y + + def _raw_encode(self): + """Convert the point to the :term:`raw encoding`.""" + prime = self.curve().p() + x_str = number_to_string(self.x(), prime) + y_str = number_to_string(self.y(), prime) + return x_str + y_str + + def _compressed_encode(self): + """Encode the point into the compressed form.""" + prime = self.curve().p() + x_str = number_to_string(self.x(), prime) + if self.y() & 1: + return b"\x03" + x_str + return b"\x02" + x_str + + def _hybrid_encode(self): + """Encode the point into the hybrid form.""" + raw_enc = self._raw_encode() + if self.y() & 1: + return b"\x07" + raw_enc + return b"\x06" + raw_enc + + def to_bytes(self, encoding="raw"): + """ + Convert the point to a byte string. + + The method by default uses the :term:`raw encoding` (specified + by `encoding="raw"`. It can also output points in :term:`uncompressed`, + :term:`compressed`, and :term:`hybrid` formats. 
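+
+        A sketch of the four encodings (``pt`` is assumed to be an already
+        constructed point on some curve)::
+
+            raw = pt.to_bytes()                # x || y
+            unc = pt.to_bytes("uncompressed")  # b"\x04" + x + y
+            comp = pt.to_bytes("compressed")   # b"\x02" or b"\x03", then x
+            hyb = pt.to_bytes("hybrid")        # b"\x06" or b"\x07", then x, y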
+ + :return: :term:`raw encoding` of a public on the curve + :rtype: bytes + """ + assert encoding in ("raw", "uncompressed", "compressed", "hybrid") + if encoding == "raw": + return self._raw_encode() + elif encoding == "uncompressed": + return b"\x04" + self._raw_encode() + elif encoding == "hybrid": + return self._hybrid_encode() + else: + return self._compressed_encode() + + +class PointJacobi(AbstractPoint): + """ + Point on an elliptic curve. Uses Jacobi coordinates. + + In Jacobian coordinates, there are three parameters, X, Y and Z. + They correspond to affine parameters 'x' and 'y' like so: + + x = X / Z² + y = Y / Z³ + """ + + def __init__(self, curve, x, y, z, order=None, generator=False): + """ + Initialise a point that uses Jacobi representation internally. + + :param CurveFp curve: curve on which the point resides + :param int x: the X parameter of Jacobi representation (equal to x when + converting from affine coordinates + :param int y: the Y parameter of Jacobi representation (equal to y when + converting from affine coordinates + :param int z: the Z parameter of Jacobi representation (equal to 1 when + converting from affine coordinates + :param int order: the point order, must be non zero when using + generator=True + :param bool generator: the point provided is a curve generator, as + such, it will be commonly used with scalar multiplication. This will + cause to precompute multiplication table generation for it + """ + super(PointJacobi, self).__init__() + self.__curve = curve + if GMPY: # pragma: no branch + self.__coords = (mpz(x), mpz(y), mpz(z)) + self.__order = order and mpz(order) + else: # pragma: no branch + self.__coords = (x, y, z) + self.__order = order + self.__generator = generator + self.__precompute = [] + + @classmethod + def from_bytes( + cls, + curve, + data, + validate_encoding=True, + valid_encodings=None, + order=None, + generator=False, + ): + """ + Initialise the object from byte encoding of a point. + + The method does accept and automatically detect the type of point + encoding used. It supports the :term:`raw encoding`, + :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. + + :param data: single point encoding of the public key + :type data: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ecdsa.ellipticcurve.CurveFp + :param validate_encoding: whether to verify that the encoding of the + point is self-consistent, defaults to True, has effect only + on ``hybrid`` encoding + :type validate_encoding: bool + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + :type valid_encodings: :term:`set-like object` + :param int order: the point order, must be non zero when using + generator=True + :param bool generator: the point provided is a curve generator, as + such, it will be commonly used with scalar multiplication. 
This + will cause to precompute multiplication table generation for it + + :raises MalformedPointError: if the public point does not lay on the + curve or the encoding is invalid + + :return: Point on curve + :rtype: PointJacobi + """ + coord_x, coord_y = super(PointJacobi, cls).from_bytes( + curve, data, validate_encoding, valid_encodings + ) + return PointJacobi(curve, coord_x, coord_y, 1, order, generator) + + def _maybe_precompute(self): + if not self.__generator or self.__precompute: + return + + # since this code will execute just once, and it's fully deterministic, + # depend on atomicity of the last assignment to switch from empty + # self.__precompute to filled one and just ignore the unlikely + # situation when two threads execute it at the same time (as it won't + # lead to inconsistent __precompute) + order = self.__order + assert order + precompute = [] + i = 1 + order *= 2 + coord_x, coord_y, coord_z = self.__coords + doubler = PointJacobi(self.__curve, coord_x, coord_y, coord_z, order) + order *= 2 + precompute.append((doubler.x(), doubler.y())) + + while i < order: + i *= 2 + doubler = doubler.double().scale() + precompute.append((doubler.x(), doubler.y())) + + self.__precompute = precompute + + def __getstate__(self): + # while this code can execute at the same time as _maybe_precompute() + # is updating the __precompute or scale() is updating the __coords, + # there is no requirement for consistency between __coords and + # __precompute + state = self.__dict__.copy() + return state + + def __setstate__(self, state): + self.__dict__.update(state) + + def __eq__(self, other): + """Compare for equality two points with each-other. + + Note: only points that lay on the same curve can be equal. + """ + x1, y1, z1 = self.__coords + if other is INFINITY: + return not y1 or not z1 + if isinstance(other, Point): + x2, y2, z2 = other.x(), other.y(), 1 + elif isinstance(other, PointJacobi): + x2, y2, z2 = other.__coords + else: + return NotImplemented + if self.__curve != other.curve(): + return False + p = self.__curve.p() + + zz1 = z1 * z1 % p + zz2 = z2 * z2 % p + + # compare the fractions by bringing them to the same denominator + # depend on short-circuit to save 4 multiplications in case of + # inequality + return (x1 * zz2 - x2 * zz1) % p == 0 and ( + y1 * zz2 * z2 - y2 * zz1 * z1 + ) % p == 0 + + def __ne__(self, other): + """Compare for inequality two points with each-other.""" + return not self == other + + def order(self): + """Return the order of the point. + + None if it is undefined. + """ + return self.__order + + def curve(self): + """Return curve over which the point is defined.""" + return self.__curve + + def x(self): + """ + Return affine x coordinate. + + This method should be used only when the 'y' coordinate is not needed. + It's computationally more efficient to use `to_affine()` and then + call x() and y() on the returned instance. Or call `scale()` + and then x() and y() on the returned instance. + """ + x, _, z = self.__coords + if z == 1: + return x + p = self.__curve.p() + z = numbertheory.inverse_mod(z, p) + return x * z ** 2 % p + + def y(self): + """ + Return affine y coordinate. + + This method should be used only when the 'x' coordinate is not needed. + It's computationally more efficient to use `to_affine()` and then + call x() and y() on the returned instance. Or call `scale()` + and then x() and y() on the returned instance. 
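+
+        For example (a sketch), when both coordinates are needed, prefer::
+
+            affine = point.to_affine()
+            x, y = affine.x(), affine.y()  # one modular inversion in total
+
+        over calling ``x()`` and ``y()`` on the Jacobi point directly,
+        which would invert Z twice.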
+ """ + _, y, z = self.__coords + if z == 1: + return y + p = self.__curve.p() + z = numbertheory.inverse_mod(z, p) + return y * z ** 3 % p + + def scale(self): + """ + Return point scaled so that z == 1. + + Modifies point in place, returns self. + """ + x, y, z = self.__coords + if z == 1: + return self + + # scaling is deterministic, so even if two threads execute the below + # code at the same time, they will set __coords to the same value + p = self.__curve.p() + z_inv = numbertheory.inverse_mod(z, p) + zz_inv = z_inv * z_inv % p + x = x * zz_inv % p + y = y * zz_inv * z_inv % p + self.__coords = (x, y, 1) + return self + + def to_affine(self): + """Return point in affine form.""" + _, y, z = self.__coords + if not y or not z: + return INFINITY + self.scale() + x, y, z = self.__coords + return Point(self.__curve, x, y, self.__order) + + @staticmethod + def from_affine(point, generator=False): + """Create from an affine point. + + :param bool generator: set to True to make the point to precalculate + multiplication table - useful for public point when verifying many + signatures (around 100 or so) or for generator points of a curve. + """ + return PointJacobi( + point.curve(), point.x(), point.y(), 1, point.order(), generator + ) + + # please note that all the methods that use the equations from + # hyperelliptic + # are formatted in a way to maximise performance. + # Things that make code faster: multiplying instead of taking to the power + # (`xx = x * x; xxxx = xx * xx % p` is faster than `xxxx = x**4 % p` and + # `pow(x, 4, p)`), + # multiple assignments at the same time (`x1, x2 = self.x1, self.x2` is + # faster than `x1 = self.x1; x2 = self.x2`), + # similarly, sometimes the `% p` is skipped if it makes the calculation + # faster and the result of calculation is later reduced modulo `p` + + def _double_with_z_1(self, X1, Y1, p, a): + """Add a point to itself with z == 1.""" + # after: + # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-mdbl-2007-bl + XX, YY = X1 * X1 % p, Y1 * Y1 % p + if not YY: + return 0, 0, 1 + YYYY = YY * YY % p + S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p + M = 3 * XX + a + T = (M * M - 2 * S) % p + # X3 = T + Y3 = (M * (S - T) - 8 * YYYY) % p + Z3 = 2 * Y1 % p + return T, Y3, Z3 + + def _double(self, X1, Y1, Z1, p, a): + """Add a point to itself, arbitrary z.""" + if Z1 == 1: + return self._double_with_z_1(X1, Y1, p, a) + if not Y1 or not Z1: + return 0, 0, 1 + # after: + # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-dbl-2007-bl + XX, YY = X1 * X1 % p, Y1 * Y1 % p + if not YY: + return 0, 0, 1 + YYYY = YY * YY % p + ZZ = Z1 * Z1 % p + S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p + M = (3 * XX + a * ZZ * ZZ) % p + T = (M * M - 2 * S) % p + # X3 = T + Y3 = (M * (S - T) - 8 * YYYY) % p + Z3 = ((Y1 + Z1) ** 2 - YY - ZZ) % p + + return T, Y3, Z3 + + def double(self): + """Add a point to itself.""" + X1, Y1, Z1 = self.__coords + + if not Y1: + return INFINITY + + p, a = self.__curve.p(), self.__curve.a() + + X3, Y3, Z3 = self._double(X1, Y1, Z1, p, a) + + if not Y3 or not Z3: + return INFINITY + return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) + + def _add_with_z_1(self, X1, Y1, X2, Y2, p): + """add points when both Z1 and Z2 equal 1""" + # after: + # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-mmadd-2007-bl + H = X2 - X1 + HH = H * H + I = 4 * HH % p + J = H * I + r = 2 * (Y2 - Y1) + if not H and not r: + return self._double_with_z_1(X1, Y1, p, self.__curve.a()) + V = X1 * I + X3 = (r ** 2 
- J - 2 * V) % p + Y3 = (r * (V - X3) - 2 * Y1 * J) % p + Z3 = 2 * H % p + return X3, Y3, Z3 + + def _add_with_z_eq(self, X1, Y1, Z1, X2, Y2, p): + """add points when Z1 == Z2""" + # after: + # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-zadd-2007-m + A = (X2 - X1) ** 2 % p + B = X1 * A % p + C = X2 * A + D = (Y2 - Y1) ** 2 % p + if not A and not D: + return self._double(X1, Y1, Z1, p, self.__curve.a()) + X3 = (D - B - C) % p + Y3 = ((Y2 - Y1) * (B - X3) - Y1 * (C - B)) % p + Z3 = Z1 * (X2 - X1) % p + return X3, Y3, Z3 + + def _add_with_z2_1(self, X1, Y1, Z1, X2, Y2, p): + """add points when Z2 == 1""" + # after: + # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-madd-2007-bl + Z1Z1 = Z1 * Z1 % p + U2, S2 = X2 * Z1Z1 % p, Y2 * Z1 * Z1Z1 % p + H = (U2 - X1) % p + HH = H * H % p + I = 4 * HH % p + J = H * I + r = 2 * (S2 - Y1) % p + if not r and not H: + return self._double_with_z_1(X2, Y2, p, self.__curve.a()) + V = X1 * I + X3 = (r * r - J - 2 * V) % p + Y3 = (r * (V - X3) - 2 * Y1 * J) % p + Z3 = ((Z1 + H) ** 2 - Z1Z1 - HH) % p + return X3, Y3, Z3 + + def _add_with_z_ne(self, X1, Y1, Z1, X2, Y2, Z2, p): + """add points with arbitrary z""" + # after: + # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-add-2007-bl + Z1Z1 = Z1 * Z1 % p + Z2Z2 = Z2 * Z2 % p + U1 = X1 * Z2Z2 % p + U2 = X2 * Z1Z1 % p + S1 = Y1 * Z2 * Z2Z2 % p + S2 = Y2 * Z1 * Z1Z1 % p + H = U2 - U1 + I = 4 * H * H % p + J = H * I % p + r = 2 * (S2 - S1) % p + if not H and not r: + return self._double(X1, Y1, Z1, p, self.__curve.a()) + V = U1 * I + X3 = (r * r - J - 2 * V) % p + Y3 = (r * (V - X3) - 2 * S1 * J) % p + Z3 = ((Z1 + Z2) ** 2 - Z1Z1 - Z2Z2) * H % p + + return X3, Y3, Z3 + + def __radd__(self, other): + """Add other to self.""" + return self + other + + def _add(self, X1, Y1, Z1, X2, Y2, Z2, p): + """add two points, select fastest method.""" + if not Y1 or not Z1: + return X2, Y2, Z2 + if not Y2 or not Z2: + return X1, Y1, Z1 + if Z1 == Z2: + if Z1 == 1: + return self._add_with_z_1(X1, Y1, X2, Y2, p) + return self._add_with_z_eq(X1, Y1, Z1, X2, Y2, p) + if Z1 == 1: + return self._add_with_z2_1(X2, Y2, Z2, X1, Y1, p) + if Z2 == 1: + return self._add_with_z2_1(X1, Y1, Z1, X2, Y2, p) + return self._add_with_z_ne(X1, Y1, Z1, X2, Y2, Z2, p) + + def __add__(self, other): + """Add two points on elliptic curve.""" + if self == INFINITY: + return other + if other == INFINITY: + return self + if isinstance(other, Point): + other = PointJacobi.from_affine(other) + if self.__curve != other.__curve: + raise ValueError("The other point is on different curve") + + p = self.__curve.p() + X1, Y1, Z1 = self.__coords + X2, Y2, Z2 = other.__coords + + X3, Y3, Z3 = self._add(X1, Y1, Z1, X2, Y2, Z2, p) + + if not Y3 or not Z3: + return INFINITY + return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) + + def __rmul__(self, other): + """Multiply point by an integer.""" + return self * other + + def _mul_precompute(self, other): + """Multiply point by integer with precomputation table.""" + X3, Y3, Z3, p = 0, 0, 1, self.__curve.p() + _add = self._add + for X2, Y2 in self.__precompute: + if other % 2: + if other % 4 >= 2: + other = (other + 1) // 2 + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p) + else: + other = (other - 1) // 2 + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p) + else: + other //= 2 + + if not Y3 or not Z3: + return INFINITY + return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) + + @staticmethod + def _naf(mult): + """Calculate non-adjacent form of 
number.""" + ret = [] + while mult: + if mult % 2: + nd = mult % 4 + if nd >= 2: + nd -= 4 + ret.append(nd) + mult -= nd + else: + ret.append(0) + mult //= 2 + return ret + + def __mul__(self, other): + """Multiply point by an integer.""" + if not self.__coords[1] or not other: + return INFINITY + if other == 1: + return self + if self.__order: + # order*2 as a protection for Minerva + other = other % (self.__order * 2) + self._maybe_precompute() + if self.__precompute: + return self._mul_precompute(other) + + self = self.scale() + X2, Y2, _ = self.__coords + X3, Y3, Z3 = 0, 0, 1 + p, a = self.__curve.p(), self.__curve.a() + _double = self._double + _add = self._add + # since adding points when at least one of them is scaled + # is quicker, reverse the NAF order + for i in reversed(self._naf(other)): + X3, Y3, Z3 = _double(X3, Y3, Z3, p, a) + if i < 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p) + elif i > 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p) + + if not Y3 or not Z3: + return INFINITY + + return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) + + def mul_add(self, self_mul, other, other_mul): + """ + Do two multiplications at the same time, add results. + + calculates self*self_mul + other*other_mul + """ + if other == INFINITY or other_mul == 0: + return self * self_mul + if self_mul == 0: + return other * other_mul + if not isinstance(other, PointJacobi): + other = PointJacobi.from_affine(other) + # when the points have precomputed answers, then multiplying them alone + # is faster (as it uses NAF and no point doublings) + self._maybe_precompute() + other._maybe_precompute() + if self.__precompute and other.__precompute: + return self * self_mul + other * other_mul + + if self.__order: + self_mul = self_mul % self.__order + other_mul = other_mul % self.__order + + # (X3, Y3, Z3) is the accumulator + X3, Y3, Z3 = 0, 0, 1 + p, a = self.__curve.p(), self.__curve.a() + + # as we have 6 unique points to work with, we can't scale all of them, + # but do scale the ones that are used most often + self.scale() + X1, Y1, Z1 = self.__coords + other.scale() + X2, Y2, Z2 = other.__coords + + _double = self._double + _add = self._add + + # with NAF we have 3 options: no add, subtract, add + # so with 2 points, we have 9 combinations: + # 0, -A, +A, -B, -A-B, +A-B, +B, -A+B, +A+B + # so we need 4 combined points: + mAmB_X, mAmB_Y, mAmB_Z = _add(X1, -Y1, Z1, X2, -Y2, Z2, p) + pAmB_X, pAmB_Y, pAmB_Z = _add(X1, Y1, Z1, X2, -Y2, Z2, p) + mApB_X, mApB_Y, mApB_Z = _add(X1, -Y1, Z1, X2, Y2, Z2, p) + pApB_X, pApB_Y, pApB_Z = _add(X1, Y1, Z1, X2, Y2, Z2, p) + # when the self and other sum to infinity, we need to add them + # one by one to get correct result but as that's very unlikely to + # happen in regular operation, we don't need to optimise this case + if not pApB_Y or not pApB_Z: + return self * self_mul + other * other_mul + + # gmp object creation has cumulatively higher overhead than the + # speedup we get from calculating the NAF using gmp so ensure use + # of int() + self_naf = list(reversed(self._naf(int(self_mul)))) + other_naf = list(reversed(self._naf(int(other_mul)))) + # ensure that the lists are the same length (zip() will truncate + # longer one otherwise) + if len(self_naf) < len(other_naf): + self_naf = [0] * (len(other_naf) - len(self_naf)) + self_naf + elif len(self_naf) > len(other_naf): + other_naf = [0] * (len(self_naf) - len(other_naf)) + other_naf + + for A, B in zip(self_naf, other_naf): + X3, Y3, Z3 = _double(X3, Y3, Z3, p, a) + + # conditions ordered from most 
to least likely + if A == 0: + if B == 0: + pass + elif B < 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, Z2, p) + else: + assert B > 0 + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, Z2, p) + elif A < 0: + if B == 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X1, -Y1, Z1, p) + elif B < 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, mAmB_X, mAmB_Y, mAmB_Z, p) + else: + assert B > 0 + X3, Y3, Z3 = _add(X3, Y3, Z3, mApB_X, mApB_Y, mApB_Z, p) + else: + assert A > 0 + if B == 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X1, Y1, Z1, p) + elif B < 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, pAmB_X, pAmB_Y, pAmB_Z, p) + else: + assert B > 0 + X3, Y3, Z3 = _add(X3, Y3, Z3, pApB_X, pApB_Y, pApB_Z, p) + + if not Y3 or not Z3: + return INFINITY + + return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) + + def __neg__(self): + """Return negated point.""" + x, y, z = self.__coords + return PointJacobi(self.__curve, x, -y, z, self.__order) + + +class Point(AbstractPoint): + """A point on an elliptic curve. Altering x and y is forbidden, + but they can be read by the x() and y() methods.""" + + def __init__(self, curve, x, y, order=None): + """curve, x, y, order; order (optional) is the order of this point.""" + super(Point, self).__init__() + self.__curve = curve + if GMPY: + self.__x = x and mpz(x) + self.__y = y and mpz(y) + self.__order = order and mpz(order) + else: + self.__x = x + self.__y = y + self.__order = order + # self.curve is allowed to be None only for INFINITY: + if self.__curve: + assert self.__curve.contains_point(x, y) + # for curves with cofactor 1, all points that are on the curve are + # scalar multiples of the base point, so performing multiplication is + # not necessary to verify that. See Section 3.2.2.1 of SEC 1 v2 + if curve and curve.cofactor() != 1 and order: + assert self * order == INFINITY + + @classmethod + def from_bytes( + cls, + curve, + data, + validate_encoding=True, + valid_encodings=None, + order=None, + ): + """ + Initialise the object from byte encoding of a point. + + The method does accept and automatically detect the type of point + encoding used. It supports the :term:`raw encoding`, + :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. + + :param data: single point encoding of the public key + :type data: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ecdsa.ellipticcurve.CurveFp + :param validate_encoding: whether to verify that the encoding of the + point is self-consistent, defaults to True, has effect only + on ``hybrid`` encoding + :type validate_encoding: bool + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + :type valid_encodings: :term:`set-like object` + :param int order: the point order, must be non zero when using + generator=True + + :raises MalformedPointError: if the public point does not lay on the + curve or the encoding is invalid + + :return: Point on curve + :rtype: Point + """ + coord_x, coord_y = super(Point, cls).from_bytes( + curve, data, validate_encoding, valid_encodings + ) + return Point(curve, coord_x, coord_y, order) + + def __eq__(self, other): + """Return True if the points are identical, False otherwise. + + Note: only points that lay on the same curve can be equal. 
+ """ + if isinstance(other, Point): + return ( + self.__curve == other.__curve + and self.__x == other.__x + and self.__y == other.__y + ) + return NotImplemented + + def __ne__(self, other): + """Returns False if points are identical, True otherwise.""" + return not self == other + + def __neg__(self): + return Point(self.__curve, self.__x, self.__curve.p() - self.__y) + + def __add__(self, other): + """Add one point to another point.""" + + # X9.62 B.3: + + if not isinstance(other, Point): + return NotImplemented + if other == INFINITY: + return self + if self == INFINITY: + return other + assert self.__curve == other.__curve + if self.__x == other.__x: + if (self.__y + other.__y) % self.__curve.p() == 0: + return INFINITY + else: + return self.double() + + p = self.__curve.p() + + l = ( + (other.__y - self.__y) + * numbertheory.inverse_mod(other.__x - self.__x, p) + ) % p + + x3 = (l * l - self.__x - other.__x) % p + y3 = (l * (self.__x - x3) - self.__y) % p + + return Point(self.__curve, x3, y3) + + def __mul__(self, other): + """Multiply a point by an integer.""" + + def leftmost_bit(x): + assert x > 0 + result = 1 + while result <= x: + result = 2 * result + return result // 2 + + e = other + if e == 0 or (self.__order and e % self.__order == 0): + return INFINITY + if self == INFINITY: + return INFINITY + if e < 0: + return (-self) * (-e) + + # From X9.62 D.3.2: + + e3 = 3 * e + negative_self = Point(self.__curve, self.__x, -self.__y, self.__order) + i = leftmost_bit(e3) // 2 + result = self + # print_("Multiplying %s by %d (e3 = %d):" % (self, other, e3)) + while i > 1: + result = result.double() + if (e3 & i) != 0 and (e & i) == 0: + result = result + self + if (e3 & i) == 0 and (e & i) != 0: + result = result + negative_self + # print_(". . . i = %d, result = %s" % ( i, result )) + i = i // 2 + + return result + + def __rmul__(self, other): + """Multiply a point by an integer.""" + + return self * other + + def __str__(self): + if self == INFINITY: + return "infinity" + return "(%d,%d)" % (self.__x, self.__y) + + def double(self): + """Return a new point that is twice the old.""" + + if self == INFINITY: + return INFINITY + + # X9.62 B.3: + + p = self.__curve.p() + a = self.__curve.a() + + l = ( + (3 * self.__x * self.__x + a) + * numbertheory.inverse_mod(2 * self.__y, p) + ) % p + + x3 = (l * l - 2 * self.__x) % p + y3 = (l * (self.__x - x3) - self.__y) % p + + return Point(self.__curve, x3, y3) + + def x(self): + return self.__x + + def y(self): + return self.__y + + def curve(self): + return self.__curve + + def order(self): + return self.__order + + +# This one point is the Point At Infinity for all purposes: +INFINITY = Point(None, None, None) diff --git a/venv/lib/python3.10/site-packages/ecdsa/errors.py b/venv/lib/python3.10/site-packages/ecdsa/errors.py new file mode 100644 index 0000000..0184c05 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/errors.py @@ -0,0 +1,4 @@ +class MalformedPointError(AssertionError): + """Raised in case the encoding of private or public key is malformed.""" + + pass diff --git a/venv/lib/python3.10/site-packages/ecdsa/keys.py b/venv/lib/python3.10/site-packages/ecdsa/keys.py new file mode 100644 index 0000000..29c77d1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/keys.py @@ -0,0 +1,1483 @@ +""" +Primary classes for performing signing and verification operations. + +.. 
glossary:: + + raw encoding + Conversion of public, private keys and signatures (which in + the mathematical sense are integers or pairs of integers) to strings of + bytes that does not use any special tags or encoding rules. + For any given curve, all keys of the same type or signatures will be + encoded to byte strings of the same length. In a more formal sense, + the integers are encoded as big-endian, constant length byte strings, + where the string length is determined by the curve order (e.g. + for NIST256p the order is 256 bits long, so the private key will be 32 + bytes long while the public key will be 64 bytes long). The encoding of a + single integer is zero-padded on the left if the numerical value is + low. In case of public keys and signatures, which are comprised of two + integers, the integers are simply concatenated. + + uncompressed + The most common formatting specified in PKIX standards; it is defined in + the X9.62 and SEC1 standards. The only difference between it and + :term:`raw encoding` is the prepending of a 0x04 byte. Thus an + uncompressed NIST256p public key encoding will be 65 bytes long. + + compressed + The public point representation that uses half of the bytes of the + :term:`uncompressed` encoding (rounded up). It uses the first byte of + the encoding to specify the sign of the y coordinate and encodes the + x coordinate as-is. The first byte of the encoding is equal to + 0x02 or 0x03. Compressed encoding of a NIST256p public key will be 33 + bytes long. + + hybrid + A combination of :term:`uncompressed` and :term:`compressed` encodings. + Both x and y coordinates are stored just as in the :term:`uncompressed` + encoding, but the first byte reflects the sign of the y coordinate. The + first byte of the encoding will be equal to 0x06 or 0x07. Hybrid + encoding of a NIST256p public key will be 65 bytes long. + + PEM + The acronym stands for Privacy Enhanced Mail, but currently it is used + primarily as the way to encode :term:`DER` objects into text that can + be either easily copy-pasted or transferred over email. + It uses headers like ``-----BEGIN <type of object>-----`` and footers + like ``-----END <type of object>-----`` to separate multiple + types of objects in the same file or the object from the surrounding + comments. The actual object stored is base64 encoded. + + DER + Distinguished Encoding Rules, the way to encode :term:`ASN.1` objects + deterministically and uniquely into byte strings. + + ASN.1 + Abstract Syntax Notation 1 is a standard description language for + specifying serialisation and deserialisation of data structures in a + portable and cross-platform way. + + bytes-like object + All the types that implement the buffer protocol. That includes + ``str`` (only on python2), ``bytes``, ``bytearray``, ``array.array`` + and ``memoryview`` of those objects. + Please note that ``array.array`` serialisation (converting it to a byte + string) is endianness dependent! A signature computed over an ``array.array`` + of integers on a big-endian system will not be verified on a + little-endian system and vice-versa. + + set-like object + All the types that support the ``in`` operator, like ``list``, + ``tuple``, ``set``, ``frozenset``, etc. +""" + + import binascii + from hashlib import sha1 + from six import PY2, b + from . import ecdsa + from . import der + from . import rfc6979 + from . 
import ellipticcurve +from .curves import NIST192p, Curve +from .ecdsa import RSZeroError +from .util import string_to_number, number_to_string, randrange +from .util import sigencode_string, sigdecode_string, bit_length +from .util import ( + oid_ecPublicKey, + encoded_oid_ecPublicKey, + oid_ecDH, + oid_ecMQV, + MalformedSignature, +) +from ._compat import normalise_bytes +from .errors import MalformedPointError +from .ellipticcurve import PointJacobi + + +__all__ = [ + "BadSignatureError", + "BadDigestError", + "VerifyingKey", + "SigningKey", + "MalformedPointError", +] + + +class BadSignatureError(Exception): + """ + Raised when verification of signature failed. + + Will be raised irrespective of reason of the failure: + + * the calculated or provided hash does not match the signature + * the signature does not match the curve/public key + * the encoding of the signature is malformed + * the size of the signature does not match the curve of the VerifyingKey + """ + + pass + + +class BadDigestError(Exception): + """Raised in case the selected hash is too large for the curve.""" + + pass + + +def _truncate_and_convert_digest(digest, curve, allow_truncate): + """Truncates and converts digest to an integer.""" + if not allow_truncate: + if len(digest) > curve.baselen: + raise BadDigestError( + "this curve ({0}) is too short " + "for the length of your digest ({1})".format( + curve.name, 8 * len(digest) + ) + ) + else: + digest = digest[: curve.baselen] + number = string_to_number(digest) + if allow_truncate: + max_length = bit_length(curve.order) + # we don't use bit_length(number) as that truncates leading zeros + length = len(digest) * 8 + + # See NIST FIPS 186-4: + # + # When the length of the output of the hash function is greater + # than N (i.e., the bit length of q), then the leftmost N bits of + # the hash function output block shall be used in any calculation + # using the hash function output during the generation or + # verification of a digital signature. + # + # as such, we need to shift-out the low-order bits: + number >>= max(0, length - max_length) + + return number + + +class VerifyingKey(object): + """ + Class for handling keys that can verify signatures (public keys). + + :ivar ecdsa.curves.Curve curve: The Curve over which all the cryptographic + operations will take place + :ivar default_hashfunc: the function that will be used for hashing the + data. 
Should implement the same API as hashlib.sha1 + :vartype default_hashfunc: callable + :ivar pubkey: the actual public key + :vartype pubkey: ecdsa.ecdsa.Public_key + """ + + def __init__(self, _error__please_use_generate=None): + """Unsupported, please use one of the classmethods to initialise.""" + if not _error__please_use_generate: + raise TypeError( + "Please use VerifyingKey.generate() to construct me" + ) + self.curve = None + self.default_hashfunc = None + self.pubkey = None + + def __repr__(self): + pub_key = self.to_string("compressed") + return "VerifyingKey.from_string({0!r}, {1!r}, {2})".format( + pub_key, self.curve, self.default_hashfunc().name + ) + + def __eq__(self, other): + """Return True if the points are identical, False otherwise.""" + if isinstance(other, VerifyingKey): + return self.curve == other.curve and self.pubkey == other.pubkey + return NotImplemented + + def __ne__(self, other): + """Return False if the points are identical, True otherwise.""" + return not self == other + + @classmethod + def from_public_point( + cls, point, curve=NIST192p, hashfunc=sha1, validate_point=True + ): + """ + Initialise the object from a Point object. + + This is a low-level method, generally you will not want to use it. + + :param point: The point to wrap around, the actual public key + :type point: ecdsa.ellipticcurve.Point + :param curve: The curve on which the point needs to reside, defaults + to NIST192p + :type curve: ecdsa.curves.Curve + :param hashfunc: The default hash function that will be used for + verification, needs to implement the same interface + as hashlib.sha1 + :type hashfunc: callable + :type bool validate_point: whether to check if the point lays on curve + should always be used if the public point is not a result + of our own calculation + + :raises MalformedPointError: if the public point does not lay on the + curve + + :return: Initialised VerifyingKey object + :rtype: VerifyingKey + """ + self = cls(_error__please_use_generate=True) + if not isinstance(point, ellipticcurve.PointJacobi): + point = ellipticcurve.PointJacobi.from_affine(point) + self.curve = curve + self.default_hashfunc = hashfunc + try: + self.pubkey = ecdsa.Public_key( + curve.generator, point, validate_point + ) + except ecdsa.InvalidPointError: + raise MalformedPointError("Point does not lay on the curve") + self.pubkey.order = curve.order + return self + + def precompute(self, lazy=False): + """ + Precompute multiplication tables for faster signature verification. + + Calling this method will cause the library to precompute the + scalar multiplication tables, used in signature verification. + While it's an expensive operation (comparable to performing + as many signatures as the bit size of the curve, i.e. 256 for NIST256p) + it speeds up verification 2 times. You should call this method + if you expect to verify hundreds of signatures (or more) using the same + VerifyingKey object. + + Note: You should call this method only once, this method generates a + new precomputation table every time it's called. 
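+
+        A usage sketch (``pem_text`` and the batch of signed messages are
+        assumed to come from elsewhere)::
+
+            vk = VerifyingKey.from_pem(pem_text)
+            vk.precompute()  # one-time cost paid up front
+            for signature, message in batch:
+                vk.verify(signature, message)  # about 2x faster per call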
+ + :param bool lazy: whether to calculate the precomputation table now + (if set to False) or if it should be delayed to the time of first + use (when set to True) + """ + self.pubkey.point = ellipticcurve.PointJacobi.from_affine( + self.pubkey.point, True + ) + # as precomputation in now delayed to the time of first use of the + # point and we were asked specifically to precompute now, make + # sure the precomputation is performed now to preserve the behaviour + if not lazy: + self.pubkey.point * 2 + + @classmethod + def from_string( + cls, + string, + curve=NIST192p, + hashfunc=sha1, + validate_point=True, + valid_encodings=None, + ): + """ + Initialise the object from byte encoding of public key. + + The method does accept and automatically detect the type of point + encoding used. It supports the :term:`raw encoding`, + :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. + + Note, while the method is named "from_string" it's a misnomer from + Python 2 days when there were no binary strings. In Python 3 the + input needs to be a bytes-like object. + + :param string: single point encoding of the public key + :type string: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ecdsa.curves.Curve + :param hashfunc: The default hash function that will be used for + verification, needs to implement the same interface as hashlib.sha1 + :type hashfunc: callable + :param validate_point: whether to verify that the point lays on the + provided curve or not, defaults to True + :type validate_point: bool + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + :type valid_encodings: :term:`set-like object` + + :raises MalformedPointError: if the public point does not lay on the + curve or the encoding is invalid + + :return: Initialised VerifyingKey object + :rtype: VerifyingKey + """ + point = PointJacobi.from_bytes( + curve.curve, + string, + validate_encoding=validate_point, + valid_encodings=valid_encodings, + ) + return cls.from_public_point(point, curve, hashfunc, validate_point) + + @classmethod + def from_pem( + cls, + string, + hashfunc=sha1, + valid_encodings=None, + valid_curve_encodings=None, + ): + """ + Initialise from public key stored in :term:`PEM` format. + + The PEM header of the key should be ``BEGIN PUBLIC KEY``. + + See the :func:`~VerifyingKey.from_der()` method for details of the + format supported. + + Note: only a single PEM object decoding is supported in provided + string. + + :param string: text with PEM-encoded public ECDSA key + :type string: str + :param valid_encodings: list of allowed point encodings. + By default :term:`uncompressed`, :term:`compressed`, and + :term:`hybrid`. To read malformed files, include + :term:`raw encoding` with ``raw`` in the list. + :type valid_encodings: :term:`set-like object + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. 
+ :type valid_curve_encodings: :term:`set-like object` + + + :return: Initialised VerifyingKey object + :rtype: VerifyingKey + """ + return cls.from_der( + der.unpem(string), + hashfunc=hashfunc, + valid_encodings=valid_encodings, + valid_curve_encodings=valid_curve_encodings, + ) + + @classmethod + def from_der( + cls, + string, + hashfunc=sha1, + valid_encodings=None, + valid_curve_encodings=None, + ): + """ + Initialise the key stored in :term:`DER` format. + + The expected format of the key is the SubjectPublicKeyInfo structure + from RFC5912 (for RSA keys, it's known as the PKCS#1 format):: + + SubjectPublicKeyInfo {PUBLIC-KEY: IOSet} ::= SEQUENCE { + algorithm AlgorithmIdentifier {PUBLIC-KEY, {IOSet}}, + subjectPublicKey BIT STRING + } + + Note: only public EC keys are supported by this method. The + SubjectPublicKeyInfo.algorithm.algorithm field must specify + id-ecPublicKey (see RFC3279). + + Only the named curve encoding is supported, thus the + SubjectPublicKeyInfo.algorithm.parameters field needs to be an + object identifier. A sequence in that field indicates an explicit + parameter curve encoding, this format is not supported. A NULL object + in that field indicates an "implicitlyCA" encoding, where the curve + parameters come from CA certificate, those, again, are not supported. + + :param string: binary string with the DER encoding of public ECDSA key + :type string: bytes-like object + :param valid_encodings: list of allowed point encodings. + By default :term:`uncompressed`, :term:`compressed`, and + :term:`hybrid`. To read malformed files, include + :term:`raw encoding` with ``raw`` in the list. + :type valid_encodings: :term:`set-like object + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. + :type valid_curve_encodings: :term:`set-like object` + + :return: Initialised VerifyingKey object + :rtype: VerifyingKey + """ + if valid_encodings is None: + valid_encodings = set(["uncompressed", "compressed", "hybrid"]) + string = normalise_bytes(string) + # [[oid_ecPublicKey,oid_curve], point_str_bitstring] + s1, empty = der.remove_sequence(string) + if empty != b"": + raise der.UnexpectedDER( + "trailing junk after DER pubkey: %s" % binascii.hexlify(empty) + ) + s2, point_str_bitstring = der.remove_sequence(s1) + # s2 = oid_ecPublicKey,oid_curve + oid_pk, rest = der.remove_object(s2) + if not oid_pk == oid_ecPublicKey: + raise der.UnexpectedDER( + "Unexpected object identifier in DER " + "encoding: {0!r}".format(oid_pk) + ) + curve = Curve.from_der(rest, valid_curve_encodings) + point_str, empty = der.remove_bitstring(point_str_bitstring, 0) + if empty != b"": + raise der.UnexpectedDER( + "trailing junk after pubkey pointstring: %s" + % binascii.hexlify(empty) + ) + # raw encoding of point is invalid in DER files + if len(point_str) == curve.verifying_key_length: + raise der.UnexpectedDER("Malformed encoding of public point") + return cls.from_string( + point_str, + curve, + hashfunc=hashfunc, + valid_encodings=valid_encodings, + ) + + @classmethod + def from_public_key_recovery( + cls, + signature, + data, + curve, + hashfunc=sha1, + sigdecode=sigdecode_string, + allow_truncate=True, + ): + """ + Return keys that can be used as verifiers of the provided signature. + + Tries to recover the public key that can be used to verify the + signature, usually returns two keys like that. 
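+
+        A sketch of typical use (``signature`` and ``data`` are assumed to
+        be a raw-encoded signature and the message it signs)::
+
+            candidates = VerifyingKey.from_public_key_recovery(
+                signature, data, NIST192p
+            )
+            # every recovered key verifies this particular signature; the
+            # signer is identified by comparing against a known public key
+            for vk in candidates:
+                assert vk.verify(signature, data)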
+ + :param signature: the byte string with the encoded signature + :type signature: bytes-like object + :param data: the data to be hashed for signature verification + :type data: bytes-like object + :param curve: the curve over which the signature was performed + :type curve: ecdsa.curves.Curve + :param hashfunc: The default hash function that will be used for + verification, needs to implement the same interface as hashlib.sha1 + :type hashfunc: callable + :param sigdecode: Callable to define the way the signature needs to + be decoded to an object, needs to handle `signature` as the + first parameter, the curve order (an int) as the second and return + a tuple with two integers, "r" as the first one and "s" as the + second one. See :func:`ecdsa.util.sigdecode_string` and + :func:`ecdsa.util.sigdecode_der` for examples. + :param bool allow_truncate: if True, the provided hashfunc can generate + values larger than the bit size of the order of the curve, the + extra bits (at the end of the digest) will be truncated. + :type sigdecode: callable + + :return: Initialised VerifyingKey objects + :rtype: list of VerifyingKey + """ + data = normalise_bytes(data) + digest = hashfunc(data).digest() + return cls.from_public_key_recovery_with_digest( + signature, + digest, + curve, + hashfunc=hashfunc, + sigdecode=sigdecode, + allow_truncate=allow_truncate, + ) + + @classmethod + def from_public_key_recovery_with_digest( + cls, + signature, + digest, + curve, + hashfunc=sha1, + sigdecode=sigdecode_string, + allow_truncate=False, + ): + """ + Return keys that can be used as verifiers of the provided signature. + + Tries to recover the public key that can be used to verify the + signature, usually returns two keys like that. + + :param signature: the byte string with the encoded signature + :type signature: bytes-like object + :param digest: the hash value of the message signed by the signature + :type digest: bytes-like object + :param curve: the curve over which the signature was performed + :type curve: ecdsa.curves.Curve + :param hashfunc: The default hash function that will be used for + verification, needs to implement the same interface as hashlib.sha1 + :type hashfunc: callable + :param sigdecode: Callable to define the way the signature needs to + be decoded to an object, needs to handle `signature` as the + first parameter, the curve order (an int) as the second and return + a tuple with two integers, "r" as the first one and "s" as the + second one. See :func:`ecdsa.util.sigdecode_string` and + :func:`ecdsa.util.sigdecode_der` for examples. + :type sigdecode: callable + :param bool allow_truncate: if True, the provided hashfunc can generate + values larger than the bit size of the order of the curve (and + the length of provided `digest`), the extra bits (at the end of the + digest) will be truncated. + + :return: Initialised VerifyingKey object + :rtype: VerifyingKey + """ + generator = curve.generator + r, s = sigdecode(signature, generator.order()) + sig = ecdsa.Signature(r, s) + + digest = normalise_bytes(digest) + digest_as_number = _truncate_and_convert_digest( + digest, curve, allow_truncate + ) + pks = sig.recover_public_keys(digest_as_number, generator) + + # Transforms the ecdsa.Public_key object into a VerifyingKey + verifying_keys = [ + cls.from_public_point(pk.point, curve, hashfunc) for pk in pks + ] + return verifying_keys + + def to_string(self, encoding="raw"): + """ + Convert the public key to a byte string. 
+ + The method by default uses the :term:`raw encoding` (specified + by `encoding="raw"`). It can also output keys in :term:`uncompressed`, + :term:`compressed` and :term:`hybrid` formats. + + Remember that the curve identification is not part of the encoding, + so to decode the point using :func:`~VerifyingKey.from_string`, the curve + needs to be specified. + + Note: while the method is called "to_string", it's a misnomer from + Python 2 days when character strings and byte strings shared a type. + On Python 3 the returned type will be `bytes`. + + :return: :term:`raw encoding` of the public key (public point) on the + curve + :rtype: bytes + """ + assert encoding in ("raw", "uncompressed", "compressed", "hybrid") + return self.pubkey.point.to_bytes(encoding) + + def to_pem( + self, point_encoding="uncompressed", curve_parameters_encoding=None + ): + """ + Convert the public key to the :term:`PEM` format. + + The PEM header of the key will be ``BEGIN PUBLIC KEY``. + + The format of the key is described in the + :func:`~VerifyingKey.from_der()` method. + This method supports only "named curve" encoding of keys. + + :param str point_encoding: specification of the encoding format + of public keys. "uncompressed" is most portable, "compressed" is + smallest. "hybrid" is uncommon and unsupported by most + implementations, it is as big as "uncompressed". + :param str curve_parameters_encoding: the encoding for curve parameters + to use, by default tries to use ``named_curve`` encoding, + if that is not possible, falls back to ``explicit`` encoding. + + :return: portable encoding of the public key + :rtype: bytes + + .. warning:: The PEM is encoded to US-ASCII; it needs to be + re-encoded if the system is incompatible (e.g. uses UTF-16) + """ + return der.topem( + self.to_der(point_encoding, curve_parameters_encoding), + "PUBLIC KEY", + ) + + def to_der( + self, point_encoding="uncompressed", curve_parameters_encoding=None + ): + """ + Convert the public key to the :term:`DER` format. + + The format of the key is described in the + :func:`~VerifyingKey.from_der()` method. + This method supports only "named curve" encoding of keys. + + :param str point_encoding: specification of the encoding format + of public keys. "uncompressed" is most portable, "compressed" is + smallest. "hybrid" is uncommon and unsupported by most + implementations, it is as big as "uncompressed". + :param str curve_parameters_encoding: the encoding for curve parameters + to use, by default tries to use ``named_curve`` encoding, + if that is not possible, falls back to ``explicit`` encoding. + + :return: DER encoding of the public key + :rtype: bytes + """ + if point_encoding == "raw": + raise ValueError("raw point_encoding not allowed in DER") + point_str = self.to_string(point_encoding) + return der.encode_sequence( + der.encode_sequence( + encoded_oid_ecPublicKey, + self.curve.to_der(curve_parameters_encoding), + ), + # 0 is the number of unused bits in the + # bit string + der.encode_bitstring(point_str, 0), + ) + + def verify( + self, + signature, + data, + hashfunc=None, + sigdecode=sigdecode_string, + allow_truncate=True, + ): + """ + Verify a signature made over provided data. + + Will hash `data` to verify the signature. + + By default expects signature in :term:`raw encoding`. Can also be used + to verify signatures in ASN.1 DER encoding by using + :func:`ecdsa.util.sigdecode_der` + as the `sigdecode` parameter. 
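+
+        For example (a sketch; ``der_signature`` is assumed to hold an
+        ASN.1 DER encoded signature over ``message``)::
+
+            from ecdsa.util import sigdecode_der
+
+            vk.verify(der_signature, message, sigdecode=sigdecode_der)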
+ + :param signature: encoding of the signature + :type signature: sigdecode method dependant + :param data: data signed by the `signature`, will be hashed using + `hashfunc`, if specified, or default hash function + :type data: bytes like object + :param hashfunc: The default hash function that will be used for + verification, needs to implement the same interface as hashlib.sha1 + :type hashfunc: callable + :param sigdecode: Callable to define the way the signature needs to + be decoded to an object, needs to handle `signature` as the + first parameter, the curve order (an int) as the second and return + a tuple with two integers, "r" as the first one and "s" as the + second one. See :func:`ecdsa.util.sigdecode_string` and + :func:`ecdsa.util.sigdecode_der` for examples. + :type sigdecode: callable + :param bool allow_truncate: if True, the provided digest can have + bigger bit-size than the order of the curve, the extra bits (at + the end of the digest) will be truncated. Use it when verifying + SHA-384 output using NIST256p or in similar situations. Defaults to + True. + + :raises BadSignatureError: if the signature is invalid or malformed + + :return: True if the verification was successful + :rtype: bool + """ + # signature doesn't have to be a bytes-like-object so don't normalise + # it, the decoders will do that + data = normalise_bytes(data) + + hashfunc = hashfunc or self.default_hashfunc + digest = hashfunc(data).digest() + return self.verify_digest(signature, digest, sigdecode, allow_truncate) + + def verify_digest( + self, + signature, + digest, + sigdecode=sigdecode_string, + allow_truncate=False, + ): + """ + Verify a signature made over provided hash value. + + By default expects signature in :term:`raw encoding`. Can also be used + to verify signatures in ASN.1 DER encoding by using + :func:`ecdsa.util.sigdecode_der` + as the `sigdecode` parameter. + + :param signature: encoding of the signature + :type signature: sigdecode method dependant + :param digest: raw hash value that the signature authenticates. + :type digest: bytes like object + :param sigdecode: Callable to define the way the signature needs to + be decoded to an object, needs to handle `signature` as the + first parameter, the curve order (an int) as the second and return + a tuple with two integers, "r" as the first one and "s" as the + second one. See :func:`ecdsa.util.sigdecode_string` and + :func:`ecdsa.util.sigdecode_der` for examples. + :type sigdecode: callable + :param bool allow_truncate: if True, the provided digest can have + bigger bit-size than the order of the curve, the extra bits (at + the end of the digest) will be truncated. Use it when verifying + SHA-384 output using NIST256p or in similar situations. 
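+
+        A sketch of verifying a pre-hashed message (assumes the signature
+        was made over the SHA-256 digest of ``message``)::
+
+            from hashlib import sha256
+
+            digest = sha256(message).digest()
+            vk.verify_digest(signature, digest, allow_truncate=True)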
+ + :raises BadSignatureError: if the signature is invalid or malformed + :raises BadDigestError: if the provided digest is too big for the curve + associated with this VerifyingKey and allow_truncate was not set + + :return: True if the verification was successful + :rtype: bool + """ + # signature doesn't have to be a bytes-like-object so don't normalise + # it, the decoders will do that + digest = normalise_bytes(digest) + number = _truncate_and_convert_digest( + digest, self.curve, allow_truncate, + ) + + try: + r, s = sigdecode(signature, self.pubkey.order) + except (der.UnexpectedDER, MalformedSignature) as e: + raise BadSignatureError("Malformed formatting of signature", e) + sig = ecdsa.Signature(r, s) + if self.pubkey.verifies(number, sig): + return True + raise BadSignatureError("Signature verification failed") + + +class SigningKey(object): + """ + Class for handling keys that can create signatures (private keys). + + :ivar ecdsa.curves.Curve curve: The Curve over which all the cryptographic + operations will take place + :ivar default_hashfunc: the function that will be used for hashing the + data. Should implement the same API as hashlib.sha1 + :ivar int baselen: the length of a :term:`raw encoding` of private key + :ivar ecdsa.keys.VerifyingKey verifying_key: the public key + associated with this private key + :ivar ecdsa.ecdsa.Private_key privkey: the actual private key + """ + + def __init__(self, _error__please_use_generate=None): + """Unsupported, please use one of the classmethods to initialise.""" + if not _error__please_use_generate: + raise TypeError("Please use SigningKey.generate() to construct me") + self.curve = None + self.default_hashfunc = None + self.baselen = None + self.verifying_key = None + self.privkey = None + + def __eq__(self, other): + """Return True if the points are identical, False otherwise.""" + if isinstance(other, SigningKey): + return ( + self.curve == other.curve + and self.verifying_key == other.verifying_key + and self.privkey == other.privkey + ) + return NotImplemented + + def __ne__(self, other): + """Return False if the points are identical, True otherwise.""" + return not self == other + + @classmethod + def generate(cls, curve=NIST192p, entropy=None, hashfunc=sha1): + """ + Generate a random private key. + + :param curve: The curve on which the point needs to reside, defaults + to NIST192p + :type curve: ecdsa.curves.Curve + :param entropy: Source of randomness for generating the private keys, + should provide cryptographically secure random numbers if the keys + need to be secure. Uses os.urandom() by default. + :type entropy: callable + :param hashfunc: The default hash function that will be used for + signing, needs to implement the same interface + as hashlib.sha1 + :type hashfunc: callable + + :return: Initialised SigningKey object + :rtype: SigningKey + """ + secexp = randrange(curve.order, entropy) + return cls.from_secret_exponent(secexp, curve, hashfunc) + + @classmethod + def from_secret_exponent(cls, secexp, curve=NIST192p, hashfunc=sha1): + """ + Create a private key from a random integer. + + Note: it's a low level method, it's recommended to use the + :func:`~SigningKey.generate` method to create private keys. + + :param int secexp: secret multiplier (the actual private key in ECDSA). + Needs to be an integer between 1 and the curve order. 
+ :param curve: The curve on which the point needs to reside + :type curve: ecdsa.curves.Curve + :param hashfunc: The default hash function that will be used for + signing, needs to implement the same interface + as hashlib.sha1 + :type hashfunc: callable + + :raises MalformedPointError: when the provided secexp is too large + or too small for the curve selected + :raises RuntimeError: if the generation of public key from private + key failed + + :return: Initialised SigningKey object + :rtype: SigningKey + """ + self = cls(_error__please_use_generate=True) + self.curve = curve + self.default_hashfunc = hashfunc + self.baselen = curve.baselen + n = curve.order + if not 1 <= secexp < n: + raise MalformedPointError( + "Invalid value for secexp, expected integer " + "between 1 and {0}".format(n) + ) + pubkey_point = curve.generator * secexp + if hasattr(pubkey_point, "scale"): + pubkey_point = pubkey_point.scale() + self.verifying_key = VerifyingKey.from_public_point( + pubkey_point, curve, hashfunc, False + ) + pubkey = self.verifying_key.pubkey + self.privkey = ecdsa.Private_key(pubkey, secexp) + self.privkey.order = n + return self + + @classmethod + def from_string(cls, string, curve=NIST192p, hashfunc=sha1): + """ + Decode the private key from :term:`raw encoding`. + + Note: the name of this method is a misnomer coming from days of + Python 2, when binary strings and character strings shared a type. + In Python 3, the expected type is `bytes`. + + :param string: the raw encoding of the private key + :type string: bytes like object + :param curve: The curve on which the point needs to reside + :type curve: ecdsa.curves.Curve + :param hashfunc: The default hash function that will be used for + signing, needs to implement the same interface + as hashlib.sha1 + :type hashfunc: callable + + :raises MalformedPointError: if the length of encoding doesn't match + the provided curve or the encoded values is too large + :raises RuntimeError: if the generation of public key from private + key failed + + :return: Initialised SigningKey object + :rtype: SigningKey + """ + string = normalise_bytes(string) + if len(string) != curve.baselen: + raise MalformedPointError( + "Invalid length of private key, received {0}, " + "expected {1}".format(len(string), curve.baselen) + ) + secexp = string_to_number(string) + return cls.from_secret_exponent(secexp, curve, hashfunc) + + @classmethod + def from_pem(cls, string, hashfunc=sha1, valid_curve_encodings=None): + """ + Initialise from key stored in :term:`PEM` format. + + The PEM formats supported are the un-encrypted RFC5915 + (the ssleay format) supported by OpenSSL, and the more common + un-encrypted RFC5958 (the PKCS #8 format). + + The legacy format files have the header with the string + ``BEGIN EC PRIVATE KEY``. + PKCS#8 files have the header ``BEGIN PRIVATE KEY``. + Encrypted files (ones that include the string + ``Proc-Type: 4,ENCRYPTED`` + right after the PEM header) are not supported. + + See :func:`~SigningKey.from_der` for ASN.1 syntax of the objects in + this files. + + :param string: text with PEM-encoded private ECDSA key + :type string: str + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. 
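+        For example (illustrative; ``pem_text`` is a made-up name for
+        a string holding a PEM-encoded private key)::
+
+            sk = SigningKey.from_pem(
+                pem_text, valid_curve_encodings=["named_curve"]
+            )
+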
+ :type valid_curve_encodings: :term:`set-like object` + + + :raises MalformedPointError: if the length of encoding doesn't match + the provided curve or the encoded values is too large + :raises RuntimeError: if the generation of public key from private + key failed + :raises UnexpectedDER: if the encoding of the PEM file is incorrect + + :return: Initialised SigningKey object + :rtype: SigningKey + """ + if not PY2 and isinstance(string, str): # pragma: no branch + string = string.encode() + + # The privkey pem may have multiple sections, commonly it also has + # "EC PARAMETERS", we need just "EC PRIVATE KEY". PKCS#8 should not + # have the "EC PARAMETERS" section; it's just "PRIVATE KEY". + private_key_index = string.find(b"-----BEGIN EC PRIVATE KEY-----") + if private_key_index == -1: + private_key_index = string.index(b"-----BEGIN PRIVATE KEY-----") + + return cls.from_der( + der.unpem(string[private_key_index:]), + hashfunc, + valid_curve_encodings, + ) + + @classmethod + def from_der(cls, string, hashfunc=sha1, valid_curve_encodings=None): + """ + Initialise from key stored in :term:`DER` format. + + The DER formats supported are the un-encrypted RFC5915 + (the ssleay format) supported by OpenSSL, and the more common + un-encrypted RFC5958 (the PKCS #8 format). + + Both formats contain an ASN.1 object following the syntax specified + in RFC5915:: + + ECPrivateKey ::= SEQUENCE { + version INTEGER { ecPrivkeyVer1(1) }} (ecPrivkeyVer1), + privateKey OCTET STRING, + parameters [0] ECParameters {{ NamedCurve }} OPTIONAL, + publicKey [1] BIT STRING OPTIONAL + } + + `publicKey` field is ignored completely (errors, if any, in it will + be undetected). + + Two formats are supported for the `parameters` field: the named + curve and the explicit encoding of curve parameters. + In the legacy ssleay format, this implementation requires the optional + `parameters` field to get the curve name. In PKCS #8 format, the curve + is part of the PrivateKeyAlgorithmIdentifier. + + The PKCS #8 format includes an ECPrivateKey object as the `privateKey` + field within a larger structure: + + OneAsymmetricKey ::= SEQUENCE { + version Version, + privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, + privateKey PrivateKey, + attributes [0] Attributes OPTIONAL, + ..., + [[2: publicKey [1] PublicKey OPTIONAL ]], + ... + } + + The `attributes` and `publicKey` fields are completely ignored; errors + in them will not be detected. + + :param string: binary string with DER-encoded private ECDSA key + :type string: bytes like object + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. + :type valid_curve_encodings: :term:`set-like object` + + :raises MalformedPointError: if the length of encoding doesn't match + the provided curve or the encoded values is too large + :raises RuntimeError: if the generation of public key from private + key failed + :raises UnexpectedDER: if the encoding of the DER file is incorrect + + :return: Initialised SigningKey object + :rtype: SigningKey + """ + s = normalise_bytes(string) + curve = None + + s, empty = der.remove_sequence(s) + if empty != b(""): + raise der.UnexpectedDER( + "trailing junk after DER privkey: %s" % binascii.hexlify(empty) + ) + + version, s = der.remove_integer(s) + + # At this point, PKCS #8 has a sequence containing the algorithm + # identifier and the curve identifier. 
The ssleay format instead has + # an octet string containing the key data, so this is how we can + # distinguish the two formats. + if der.is_sequence(s): + if version not in (0, 1): + raise der.UnexpectedDER( + "expected version '0' or '1' at start of privkey, got %d" + % version + ) + + sequence, s = der.remove_sequence(s) + algorithm_oid, algorithm_identifier = der.remove_object(sequence) + curve = Curve.from_der(algorithm_identifier, valid_curve_encodings) + + if algorithm_oid not in (oid_ecPublicKey, oid_ecDH, oid_ecMQV): + raise der.UnexpectedDER( + "unexpected algorithm identifier '%s'" % (algorithm_oid,) + ) + if empty != b"": + raise der.UnexpectedDER( + "unexpected data after algorithm identifier: %s" + % binascii.hexlify(empty) + ) + + # Up next is an octet string containing an ECPrivateKey. Ignore + # the optional "attributes" and "publicKey" fields that come after. + s, _ = der.remove_octet_string(s) + + # Unpack the ECPrivateKey to get to the key data octet string, + # and rejoin the ssleay parsing path. + s, empty = der.remove_sequence(s) + if empty != b(""): + raise der.UnexpectedDER( + "trailing junk after DER privkey: %s" + % binascii.hexlify(empty) + ) + + version, s = der.remove_integer(s) + + # The version of the ECPrivateKey must be 1. + if version != 1: + raise der.UnexpectedDER( + "expected version '1' at start of DER privkey, got %d" + % version + ) + + privkey_str, s = der.remove_octet_string(s) + + if not curve: + tag, curve_oid_str, s = der.remove_constructed(s) + if tag != 0: + raise der.UnexpectedDER( + "expected tag 0 in DER privkey, got %d" % tag + ) + curve = Curve.from_der(curve_oid_str, valid_curve_encodings) + + # we don't actually care about the following fields + # + # tag, pubkey_bitstring, s = der.remove_constructed(s) + # if tag != 1: + # raise der.UnexpectedDER("expected tag 1 in DER privkey, got %d" + # % tag) + # pubkey_str = der.remove_bitstring(pubkey_bitstring, 0) + # if empty != "": + # raise der.UnexpectedDER("trailing junk after DER privkey " + # "pubkeystr: %s" + # % binascii.hexlify(empty)) + + # our from_string method likes fixed-length privkey strings + if len(privkey_str) < curve.baselen: + privkey_str = ( + b("\x00") * (curve.baselen - len(privkey_str)) + privkey_str + ) + return cls.from_string(privkey_str, curve, hashfunc) + + def to_string(self): + """ + Convert the private key to :term:`raw encoding`. + + Note: while the method is named "to_string", its name comes from + Python 2 days, when binary and character strings used the same type. + The type used in Python 3 is `bytes`. + + :return: raw encoding of private key + :rtype: bytes + """ + secexp = self.privkey.secret_multiplier + s = number_to_string(secexp, self.privkey.order) + return s + + def to_pem( + self, + point_encoding="uncompressed", + format="ssleay", + curve_parameters_encoding=None, + ): + """ + Convert the private key to the :term:`PEM` format. + + See :func:`~SigningKey.from_pem` method for format description. + + Only the named curve format is supported. + The public key will be included in generated string. + + The PEM header will specify ``BEGIN EC PRIVATE KEY`` or + ``BEGIN PRIVATE KEY``, depending on the desired format. 
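+
+        Example (an illustrative round-trip; ``sk`` is assumed to be an
+        existing SigningKey instance)::
+
+            pem = sk.to_pem(format="pkcs8")
+            assert pem.startswith(b"-----BEGIN PRIVATE KEY-----")
+            restored = SigningKey.from_pem(pem)
+            assert restored.to_string() == sk.to_string()
+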
+ + :param str point_encoding: format to use for encoding public point + :param str format: either ``ssleay`` (default) or ``pkcs8`` + :param str curve_parameters_encoding: format of encoded curve + parameters, default depends on the curve, if the curve has + an associated OID, ``named_curve`` format will be used, + if no OID is associated with the curve, the fallback of + ``explicit`` parameters will be used. + + :return: PEM encoded private key + :rtype: bytes + + .. warning:: The PEM is encoded to US-ASCII, it needs to be + re-encoded if the system is incompatible (e.g. uses UTF-16) + """ + # TODO: "BEGIN ECPARAMETERS" + assert format in ("ssleay", "pkcs8") + header = "EC PRIVATE KEY" if format == "ssleay" else "PRIVATE KEY" + return der.topem( + self.to_der(point_encoding, format, curve_parameters_encoding), + header, + ) + + def to_der( + self, + point_encoding="uncompressed", + format="ssleay", + curve_parameters_encoding=None, + ): + """ + Convert the private key to the :term:`DER` format. + + See :func:`~SigningKey.from_der` method for format specification. + + Only the named curve format is supported. + The public key will be included in the generated string. + + :param str point_encoding: format to use for encoding public point + :param str format: either ``ssleay`` (default) or ``pkcs8`` + :param str curve_parameters_encoding: format of encoded curve + parameters, default depends on the curve, if the curve has + an associated OID, ``named_curve`` format will be used, + if no OID is associated with the curve, the fallback of + ``explicit`` parameters will be used. + + :return: DER encoded private key + :rtype: bytes + """ + # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1), + # cont[1],bitstring]) + if point_encoding == "raw": + raise ValueError("raw encoding not allowed in DER") + assert format in ("ssleay", "pkcs8") + encoded_vk = self.get_verifying_key().to_string(point_encoding) + priv_key_elems = [ + der.encode_integer(1), + der.encode_octet_string(self.to_string()), + ] + if format == "ssleay": + priv_key_elems.append( + der.encode_constructed( + 0, self.curve.to_der(curve_parameters_encoding) + ) + ) + # the 0 in encode_bitstring specifies the number of unused bits + # in the `encoded_vk` string + priv_key_elems.append( + der.encode_constructed(1, der.encode_bitstring(encoded_vk, 0)) + ) + ec_private_key = der.encode_sequence(*priv_key_elems) + + if format == "ssleay": + return ec_private_key + else: + return der.encode_sequence( + # version = 1 means the public key is not present in the + # top-level structure. + der.encode_integer(1), + der.encode_sequence( + der.encode_oid(*oid_ecPublicKey), + self.curve.to_der(curve_parameters_encoding), + ), + der.encode_octet_string(ec_private_key), + ) + + def get_verifying_key(self): + """ + Return the VerifyingKey associated with this private key. + + Equivalent to reading the `verifying_key` field of an instance. + + :return: a public key that can be used to verify the signatures made + with this SigningKey + :rtype: VerifyingKey + """ + return self.verifying_key + + def sign_deterministic( + self, + data, + hashfunc=None, + sigencode=sigencode_string, + extra_entropy=b"", + ): + """ + Create signature over data using the deterministic RFC6979 algorithm. + + The data will be hashed using the `hashfunc` function before signing. + + This is the recommended method for performing signatures when hashing + of data is necessary. 
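+
+        Example (a minimal sketch; assumes ``sk`` was generated with
+        ``hashfunc=sha256``)::
+
+            sig = sk.sign_deterministic(b"message")
+            # RFC 6979: same key and same data give the same signature
+            assert sig == sk.sign_deterministic(b"message")
+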
+ + :param data: data to be hashed and computed signature over + :type data: bytes like object + :param hashfunc: hash function to use for computing the signature, + if unspecified, the default hash function selected during + object initialisation will be used (see + `VerifyingKey.default_hashfunc`). The object needs to implement + the same interface as hashlib.sha1. + :type hashfunc: callable + :param sigencode: function used to encode the signature. + The function needs to accept three parameters: the two integers + that are the signature and the order of the curve over which the + signature was computed. It needs to return an encoded signature. + See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` + as examples of such functions. + :type sigencode: callable + :param extra_entropy: additional data that will be fed into the random + number generator used in the RFC6979 process. Entirely optional. + :type extra_entropy: bytes like object + + :return: encoded signature over `data` + :rtype: bytes or sigencode function dependant type + """ + hashfunc = hashfunc or self.default_hashfunc + data = normalise_bytes(data) + extra_entropy = normalise_bytes(extra_entropy) + digest = hashfunc(data).digest() + + return self.sign_digest_deterministic( + digest, + hashfunc=hashfunc, + sigencode=sigencode, + extra_entropy=extra_entropy, + allow_truncate=True, + ) + + def sign_digest_deterministic( + self, + digest, + hashfunc=None, + sigencode=sigencode_string, + extra_entropy=b"", + allow_truncate=False, + ): + """ + Create signature for digest using the deterministic RFC6979 algorithm. + + `digest` should be the output of cryptographically secure hash function + like SHA256 or SHA-3-256. + + This is the recommended method for performing signatures when no + hashing of data is necessary. + + :param digest: hash of data that will be signed + :type digest: bytes like object + :param hashfunc: hash function to use for computing the random "k" + value from RFC6979 process, + if unspecified, the default hash function selected during + object initialisation will be used (see + `VerifyingKey.default_hashfunc`). The object needs to implement + the same interface as hashlib.sha1. + :type hashfunc: callable + :param sigencode: function used to encode the signature. + The function needs to accept three parameters: the two integers + that are the signature and the order of the curve over which the + signature was computed. It needs to return an encoded signature. + See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` + as examples of such functions. + :type sigencode: callable + :param extra_entropy: additional data that will be fed into the random + number generator used in the RFC6979 process. Entirely optional. + :type extra_entropy: bytes like object + :param bool allow_truncate: if True, the provided digest can have + bigger bit-size than the order of the curve, the extra bits (at + the end of the digest) will be truncated. Use it when signing + SHA-384 output using NIST256p or in similar situations. 
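+
+        Example (illustrative; ``sk`` and the SHA-512 with NIST256p
+        pairing are assumptions made for this example)::
+
+            from hashlib import sha512
+
+            digest = sha512(b"message").digest()
+            # the digest is longer than the curve order, so opt in to
+            # truncation of the extra bits
+            sig = sk.sign_digest_deterministic(
+                digest, hashfunc=sha512, allow_truncate=True
+            )
+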
+ + :return: encoded signature for the `digest` hash + :rtype: bytes or sigencode function dependant type + """ + secexp = self.privkey.secret_multiplier + hashfunc = hashfunc or self.default_hashfunc + digest = normalise_bytes(digest) + extra_entropy = normalise_bytes(extra_entropy) + + def simple_r_s(r, s, order): + return r, s, order + + retry_gen = 0 + while True: + k = rfc6979.generate_k( + self.curve.generator.order(), + secexp, + hashfunc, + digest, + retry_gen=retry_gen, + extra_entropy=extra_entropy, + ) + try: + r, s, order = self.sign_digest( + digest, + sigencode=simple_r_s, + k=k, + allow_truncate=allow_truncate, + ) + break + except RSZeroError: + retry_gen += 1 + + return sigencode(r, s, order) + + def sign( + self, + data, + entropy=None, + hashfunc=None, + sigencode=sigencode_string, + k=None, + allow_truncate=True, + ): + """ + Create signature over data using the probabilistic ECDSA algorithm. + + This method uses the standard ECDSA algorithm that requires a + cryptographically secure random number generator. + + It's recommended to use the :func:`~SigningKey.sign_deterministic` + method instead of this one. + + :param data: data that will be hashed for signing + :type data: bytes like object + :param callable entropy: randomness source, os.urandom by default + :param hashfunc: hash function to use for hashing the provided `data`. + If unspecified the default hash function selected during + object initialisation will be used (see + `VerifyingKey.default_hashfunc`). + Should behave like hashlib.sha1. The output length of the + hash (in bytes) must not be longer than the length of the curve + order (rounded up to the nearest byte), so using SHA256 with + NIST256p is ok, but SHA256 with NIST192p is not. (In the 2**-96ish + unlikely event of a hash output larger than the curve order, the + hash will effectively be wrapped mod n). + Use hashfunc=hashlib.sha1 to match openssl's -ecdsa-with-SHA1 mode, + or hashfunc=hashlib.sha256 for openssl-1.0.0's -ecdsa-with-SHA256. + :type hashfunc: callable + :param sigencode: function used to encode the signature. + The function needs to accept three parameters: the two integers + that are the signature and the order of the curve over which the + signature was computed. It needs to return an encoded signature. + See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` + as examples of such functions. + :type sigencode: callable + :param int k: a pre-selected nonce for calculating the signature. + In typical use cases, it should be set to None (the default) to + allow its generation from an entropy source. + :param bool allow_truncate: if True, the provided digest can have + bigger bit-size than the order of the curve, the extra bits (at + the end of the digest) will be truncated. Use it when signing + SHA-384 output using NIST256p or in similar situations. True by + default. + + :raises RSZeroError: in the unlikely event when "r" parameter or + "s" parameter of the created signature is equal 0, as that would + leak the key. Caller should try a better entropy source, retry with + different 'k', or use the + :func:`~SigningKey.sign_deterministic` in such case. 
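+
+        Example (sketch only; ``sk`` is an assumed existing key)::
+
+            sig_a = sk.sign(b"message")
+            sig_b = sk.sign(b"message")
+            # probabilistic ECDSA draws a fresh random k on each call,
+            # so the two encodings differ (with overwhelming
+            # probability)
+            assert sig_a != sig_b
+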
+ + :return: encoded signature of the hash of `data` + :rtype: bytes or sigencode function dependant type + """ + hashfunc = hashfunc or self.default_hashfunc + data = normalise_bytes(data) + h = hashfunc(data).digest() + return self.sign_digest(h, entropy, sigencode, k, allow_truncate) + + def sign_digest( + self, + digest, + entropy=None, + sigencode=sigencode_string, + k=None, + allow_truncate=False, + ): + """ + Create signature over digest using the probabilistic ECDSA algorithm. + + This method uses the standard ECDSA algorithm that requires a + cryptographically secure random number generator. + + This method does not hash the input. + + It's recommended to use the + :func:`~SigningKey.sign_digest_deterministic` method + instead of this one. + + :param digest: hash value that will be signed + :type digest: bytes like object + :param callable entropy: randomness source, os.urandom by default + :param sigencode: function used to encode the signature. + The function needs to accept three parameters: the two integers + that are the signature and the order of the curve over which the + signature was computed. It needs to return an encoded signature. + See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` + as examples of such functions. + :type sigencode: callable + :param int k: a pre-selected nonce for calculating the signature. + In typical use cases, it should be set to None (the default) to + allow its generation from an entropy source. + :param bool allow_truncate: if True, the provided digest can have + bigger bit-size than the order of the curve, the extra bits (at + the end of the digest) will be truncated. Use it when signing + SHA-384 output using NIST256p or in similar situations. + + :raises RSZeroError: in the unlikely event when "r" parameter or + "s" parameter of the created signature is equal 0, as that would + leak the key. Caller should try a better entropy source, retry with + different 'k', or use the + :func:`~SigningKey.sign_digest_deterministic` in such case. + + :return: encoded signature for the `digest` hash + :rtype: bytes or sigencode function dependant type + """ + digest = normalise_bytes(digest) + number = _truncate_and_convert_digest( + digest, self.curve, allow_truncate, + ) + r, s = self.sign_number(number, entropy, k) + return sigencode(r, s, self.privkey.order) + + def sign_number(self, number, entropy=None, k=None): + """ + Sign an integer directly. + + Note, this is a low level method, usually you will want to use + :func:`~SigningKey.sign_deterministic` or + :func:`~SigningKey.sign_digest_deterministic`. + + :param int number: number to sign using the probabilistic ECDSA + algorithm. + :param callable entropy: entropy source, os.urandom by default + :param int k: pre-selected nonce for signature operation. If unset + it will be selected at random using the entropy source. + + :raises RSZeroError: in the unlikely event when "r" parameter or + "s" parameter of the created signature is equal 0, as that would + leak the key. Caller should try a better entropy source, retry with + different 'k', or use the + :func:`~SigningKey.sign_digest_deterministic` in such case. 
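+
+        Example (a low-level sketch; the fixed ``k`` is for
+        illustration only and must never be reused for real
+        signatures)::
+
+            r, s = sk.sign_number(0xCAFE, k=0x1337)
+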
+ + :return: the "r" and "s" parameters of the signature + :rtype: tuple of ints + """ + order = self.privkey.order + + if k is not None: + _k = k + else: + _k = randrange(order, entropy) + + assert 1 <= _k < order + sig = self.privkey.sign(number, _k) + return sig.r, sig.s diff --git a/venv/lib/python3.10/site-packages/ecdsa/numbertheory.py b/venv/lib/python3.10/site-packages/ecdsa/numbertheory.py new file mode 100644 index 0000000..9ad3d21 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/numbertheory.py @@ -0,0 +1,822 @@ +#! /usr/bin/env python +# +# Provide some simple capabilities from number theory. +# +# Version of 2008.11.14. +# +# Written in 2005 and 2006 by Peter Pearson and placed in the public domain. +# Revision history: +# 2008.11.14: Use pow(base, exponent, modulus) for modular_exp. +# Make gcd and lcm accept arbitrarly many arguments. + +from __future__ import division + +import sys +from six import integer_types, PY2 +from six.moves import reduce + +try: + xrange +except NameError: + xrange = range +try: + from gmpy2 import powmod + + GMPY2 = True + GMPY = False +except ImportError: + GMPY2 = False + try: + from gmpy import mpz + + GMPY = True + except ImportError: + GMPY = False + +import math +import warnings + + +class Error(Exception): + """Base class for exceptions in this module.""" + + pass + + +class SquareRootError(Error): + pass + + +class NegativeExponentError(Error): + pass + + +def modular_exp(base, exponent, modulus): # pragma: no cover + """Raise base to exponent, reducing by modulus""" + # deprecated in 0.14 + warnings.warn( + "Function is unused in library code. If you use this code, " + "change to pow() builtin.", + DeprecationWarning, + ) + if exponent < 0: + raise NegativeExponentError( + "Negative exponents (%d) not allowed" % exponent + ) + return pow(base, exponent, modulus) + + +def polynomial_reduce_mod(poly, polymod, p): + """Reduce poly by polymod, integer arithmetic modulo p. + + Polynomials are represented as lists of coefficients + of increasing powers of x.""" + + # This module has been tested only by extensive use + # in calculating modular square roots. + + # Just to make this easy, require a monic polynomial: + assert polymod[-1] == 1 + + assert len(polymod) > 1 + + while len(poly) >= len(polymod): + if poly[-1] != 0: + for i in xrange(2, len(polymod) + 1): + poly[-i] = (poly[-i] - poly[-1] * polymod[-i]) % p + poly = poly[0:-1] + + return poly + + +def polynomial_multiply_mod(m1, m2, polymod, p): + """Polynomial multiplication modulo a polynomial over ints mod p. + + Polynomials are represented as lists of coefficients + of increasing powers of x.""" + + # This is just a seat-of-the-pants implementation. + + # This module has been tested only by extensive use + # in calculating modular square roots. + + # Initialize the product to zero: + + prod = (len(m1) + len(m2) - 1) * [0] + + # Add together all the cross-terms: + + for i in xrange(len(m1)): + for j in xrange(len(m2)): + prod[i + j] = (prod[i + j] + m1[i] * m2[j]) % p + + return polynomial_reduce_mod(prod, polymod, p) + + +def polynomial_exp_mod(base, exponent, polymod, p): + """Polynomial exponentiation modulo a polynomial over ints mod p. + + Polynomials are represented as lists of coefficients + of increasing powers of x.""" + + # Based on the Handbook of Applied Cryptography, algorithm 2.227. + + # This module has been tested only by extensive use + # in calculating modular square roots. 
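+
+    # A worked illustration of the square-and-multiply loop below (an
+    # added note, not upstream commentary): for exponent = 5 = 0b101
+    # the loop squares G through G**2 and G**4, and multiplies into s
+    # only those powers whose exponent bit is set, giving
+    # base**5 = base**4 * base**1, all reduced modulo (polymod, p).
+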
+ + assert exponent < p + + if exponent == 0: + return [1] + + G = base + k = exponent + if k % 2 == 1: + s = G + else: + s = [1] + + while k > 1: + k = k // 2 + G = polynomial_multiply_mod(G, G, polymod, p) + if k % 2 == 1: + s = polynomial_multiply_mod(G, s, polymod, p) + + return s + + +def jacobi(a, n): + """Jacobi symbol""" + + # Based on the Handbook of Applied Cryptography (HAC), algorithm 2.149. + + # This function has been tested by comparison with a small + # table printed in HAC, and by extensive use in calculating + # modular square roots. + + assert n >= 3 + assert n % 2 == 1 + a = a % n + if a == 0: + return 0 + if a == 1: + return 1 + a1, e = a, 0 + while a1 % 2 == 0: + a1, e = a1 // 2, e + 1 + if e % 2 == 0 or n % 8 == 1 or n % 8 == 7: + s = 1 + else: + s = -1 + if a1 == 1: + return s + if n % 4 == 3 and a1 % 4 == 3: + s = -s + return s * jacobi(n % a1, a1) + + +def square_root_mod_prime(a, p): + """Modular square root of a, mod p, p prime.""" + + # Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39. + + # This module has been tested for all values in [0,p-1] for + # every prime p from 3 to 1229. + + assert 0 <= a < p + assert 1 < p + + if a == 0: + return 0 + if p == 2: + return a + + jac = jacobi(a, p) + if jac == -1: + raise SquareRootError("%d has no square root modulo %d" % (a, p)) + + if p % 4 == 3: + return pow(a, (p + 1) // 4, p) + + if p % 8 == 5: + d = pow(a, (p - 1) // 4, p) + if d == 1: + return pow(a, (p + 3) // 8, p) + if d == p - 1: + return (2 * a * pow(4 * a, (p - 5) // 8, p)) % p + raise RuntimeError("Shouldn't get here.") + + if PY2: + # xrange on python2 can take integers representable as C long only + range_top = min(0x7FFFFFFF, p) + else: + range_top = p + for b in xrange(2, range_top): + if jacobi(b * b - 4 * a, p) == -1: + f = (a, -b, 1) + ff = polynomial_exp_mod((0, 1), (p + 1) // 2, f, p) + assert ff[1] == 0 + return ff[0] + raise RuntimeError("No b found.") + + +# because all the inverse_mod code is arch/environment specific, and coveralls +# expects it to execute equal number of times, we need to waive it by +# adding the "no branch" pragma to all branches +if GMPY2: # pragma: no branch + + def inverse_mod(a, m): + """Inverse of a mod m.""" + if a == 0: # pragma: no branch + return 0 + return powmod(a, -1, m) + + +elif GMPY: # pragma: no branch + + def inverse_mod(a, m): + """Inverse of a mod m.""" + # while libgmp does support inverses modulo, it is accessible + # only using the native `pow()` function, and `pow()` in gmpy sanity + # checks the parameters before passing them on to underlying + # implementation + if a == 0: # pragma: no branch + return 0 + a = mpz(a) + m = mpz(m) + + lm, hm = mpz(1), mpz(0) + low, high = a % m, m + while low > 1: # pragma: no branch + r = high // low + lm, low, hm, high = hm - lm * r, high - low * r, lm, low + + return lm % m + + +elif sys.version_info >= (3, 8): # pragma: no branch + + def inverse_mod(a, m): + """Inverse of a mod m.""" + if a == 0: # pragma: no branch + return 0 + return pow(a, -1, m) + + +else: # pragma: no branch + + def inverse_mod(a, m): + """Inverse of a mod m.""" + + if a == 0: # pragma: no branch + return 0 + + lm, hm = 1, 0 + low, high = a % m, m + while low > 1: # pragma: no branch + r = high // low + lm, low, hm, high = hm - lm * r, high - low * r, lm, low + + return lm % m + + +try: + gcd2 = math.gcd +except AttributeError: + + def gcd2(a, b): + """Greatest common divisor using Euclid's algorithm.""" + while a: + a, b = b % a, a + return b + + +def gcd(*a): + 
"""Greatest common divisor. + + Usage: gcd([ 2, 4, 6 ]) + or: gcd(2, 4, 6) + """ + + if len(a) > 1: + return reduce(gcd2, a) + if hasattr(a[0], "__iter__"): + return reduce(gcd2, a[0]) + return a[0] + + +def lcm2(a, b): + """Least common multiple of two integers.""" + + return (a * b) // gcd(a, b) + + +def lcm(*a): + """Least common multiple. + + Usage: lcm([ 3, 4, 5 ]) + or: lcm(3, 4, 5) + """ + + if len(a) > 1: + return reduce(lcm2, a) + if hasattr(a[0], "__iter__"): + return reduce(lcm2, a[0]) + return a[0] + + +def factorization(n): + """Decompose n into a list of (prime,exponent) pairs.""" + + assert isinstance(n, integer_types) + + if n < 2: + return [] + + result = [] + + # Test the small primes: + + for d in smallprimes: + if d > n: + break + q, r = divmod(n, d) + if r == 0: + count = 1 + while d <= n: + n = q + q, r = divmod(n, d) + if r != 0: + break + count = count + 1 + result.append((d, count)) + + # If n is still greater than the last of our small primes, + # it may require further work: + + if n > smallprimes[-1]: + if is_prime(n): # If what's left is prime, it's easy: + result.append((n, 1)) + else: # Ugh. Search stupidly for a divisor: + d = smallprimes[-1] + while 1: + d = d + 2 # Try the next divisor. + q, r = divmod(n, d) + if q < d: # n < d*d means we're done, n = 1 or prime. + break + if r == 0: # d divides n. How many times? + count = 1 + n = q + while d <= n: # As long as d might still divide n, + q, r = divmod(n, d) # see if it does. + if r != 0: + break + n = q # It does. Reduce n, increase count. + count = count + 1 + result.append((d, count)) + if n > 1: + result.append((n, 1)) + + return result + + +def phi(n): # pragma: no cover + """Return the Euler totient function of n.""" + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + assert isinstance(n, integer_types) + + if n < 3: + return 1 + + result = 1 + ff = factorization(n) + for f in ff: + e = f[1] + if e > 1: + result = result * f[0] ** (e - 1) * (f[0] - 1) + else: + result = result * (f[0] - 1) + return result + + +def carmichael(n): # pragma: no cover + """Return Carmichael function of n. + + Carmichael(n) is the smallest integer x such that + m**x = 1 mod n for all m relatively prime to n. + """ + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + return carmichael_of_factorized(factorization(n)) + + +def carmichael_of_factorized(f_list): # pragma: no cover + """Return the Carmichael function of a number that is + represented as a list of (prime,exponent) pairs. + """ + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + if len(f_list) < 1: + return 1 + + result = carmichael_of_ppower(f_list[0]) + for i in xrange(1, len(f_list)): + result = lcm(result, carmichael_of_ppower(f_list[i])) + + return result + + +def carmichael_of_ppower(pp): # pragma: no cover + """Carmichael function of the given power of the given prime.""" + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. 
If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + p, a = pp + if p == 2 and a > 2: + return 2 ** (a - 2) + else: + return (p - 1) * p ** (a - 1) + + +def order_mod(x, m): # pragma: no cover + """Return the order of x in the multiplicative group mod m.""" + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + # Warning: this implementation is not very clever, and will + # take a long time if m is very large. + + if m <= 1: + return 0 + + assert gcd(x, m) == 1 + + z = x + result = 1 + while z != 1: + z = (z * x) % m + result = result + 1 + return result + + +def largest_factor_relatively_prime(a, b): # pragma: no cover + """Return the largest factor of a relatively prime to b.""" + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + while 1: + d = gcd(a, b) + if d <= 1: + break + b = d + while 1: + q, r = divmod(a, d) + if r > 0: + break + a = q + return a + + +def kinda_order_mod(x, m): # pragma: no cover + """Return the order of x in the multiplicative group mod m', + where m' is the largest factor of m relatively prime to x. + """ + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + return order_mod(x, largest_factor_relatively_prime(m, x)) + + +def is_prime(n): + """Return True if x is prime, False otherwise. + + We use the Miller-Rabin test, as given in Menezes et al. p. 138. + This test is not exact: there are composite values n for which + it returns True. + + In testing the odd numbers from 10000001 to 19999999, + about 66 composites got past the first test, + 5 got past the second test, and none got past the third. + Since factors of 2, 3, 5, 7, and 11 were detected during + preliminary screening, the number of numbers tested by + Miller-Rabin was (19999999 - 10000001)*(2/3)*(4/5)*(6/7) + = 4.57 million. + """ + + # (This is used to study the risk of false positives:) + global miller_rabin_test_count + + miller_rabin_test_count = 0 + + if n <= smallprimes[-1]: + if n in smallprimes: + return True + else: + return False + + if gcd(n, 2 * 3 * 5 * 7 * 11) != 1: + return False + + # Choose a number of iterations sufficient to reduce the + # probability of accepting a composite below 2**-80 + # (from Menezes et al. 
Table 4.4): + + t = 40 + n_bits = 1 + int(math.log(n, 2)) + for k, tt in ( + (100, 27), + (150, 18), + (200, 15), + (250, 12), + (300, 9), + (350, 8), + (400, 7), + (450, 6), + (550, 5), + (650, 4), + (850, 3), + (1300, 2), + ): + if n_bits < k: + break + t = tt + + # Run the test t times: + + s = 0 + r = n - 1 + while (r % 2) == 0: + s = s + 1 + r = r // 2 + for i in xrange(t): + a = smallprimes[i] + y = pow(a, r, n) + if y != 1 and y != n - 1: + j = 1 + while j <= s - 1 and y != n - 1: + y = pow(y, 2, n) + if y == 1: + miller_rabin_test_count = i + 1 + return False + j = j + 1 + if y != n - 1: + miller_rabin_test_count = i + 1 + return False + return True + + +def next_prime(starting_value): + """Return the smallest prime larger than the starting value.""" + + if starting_value < 2: + return 2 + result = (starting_value + 1) | 1 + while not is_prime(result): + result = result + 2 + return result + + +smallprimes = [ + 2, + 3, + 5, + 7, + 11, + 13, + 17, + 19, + 23, + 29, + 31, + 37, + 41, + 43, + 47, + 53, + 59, + 61, + 67, + 71, + 73, + 79, + 83, + 89, + 97, + 101, + 103, + 107, + 109, + 113, + 127, + 131, + 137, + 139, + 149, + 151, + 157, + 163, + 167, + 173, + 179, + 181, + 191, + 193, + 197, + 199, + 211, + 223, + 227, + 229, + 233, + 239, + 241, + 251, + 257, + 263, + 269, + 271, + 277, + 281, + 283, + 293, + 307, + 311, + 313, + 317, + 331, + 337, + 347, + 349, + 353, + 359, + 367, + 373, + 379, + 383, + 389, + 397, + 401, + 409, + 419, + 421, + 431, + 433, + 439, + 443, + 449, + 457, + 461, + 463, + 467, + 479, + 487, + 491, + 499, + 503, + 509, + 521, + 523, + 541, + 547, + 557, + 563, + 569, + 571, + 577, + 587, + 593, + 599, + 601, + 607, + 613, + 617, + 619, + 631, + 641, + 643, + 647, + 653, + 659, + 661, + 673, + 677, + 683, + 691, + 701, + 709, + 719, + 727, + 733, + 739, + 743, + 751, + 757, + 761, + 769, + 773, + 787, + 797, + 809, + 811, + 821, + 823, + 827, + 829, + 839, + 853, + 857, + 859, + 863, + 877, + 881, + 883, + 887, + 907, + 911, + 919, + 929, + 937, + 941, + 947, + 953, + 967, + 971, + 977, + 983, + 991, + 997, + 1009, + 1013, + 1019, + 1021, + 1031, + 1033, + 1039, + 1049, + 1051, + 1061, + 1063, + 1069, + 1087, + 1091, + 1093, + 1097, + 1103, + 1109, + 1117, + 1123, + 1129, + 1151, + 1153, + 1163, + 1171, + 1181, + 1187, + 1193, + 1201, + 1213, + 1217, + 1223, + 1229, +] + +miller_rabin_test_count = 0 diff --git a/venv/lib/python3.10/site-packages/ecdsa/rfc6979.py b/venv/lib/python3.10/site-packages/ecdsa/rfc6979.py new file mode 100644 index 0000000..1e577c0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/rfc6979.py @@ -0,0 +1,110 @@ +""" +RFC 6979: + Deterministic Usage of the Digital Signature Algorithm (DSA) and + Elliptic Curve Digital Signature Algorithm (ECDSA) + + http://tools.ietf.org/html/rfc6979 + +Many thanks to Coda Hale for his implementation in Go language: + https://github.com/codahale/rfc6979 +""" + +import hmac +from binascii import hexlify +from .util import number_to_string, number_to_string_crop, bit_length +from ._compat import hmac_compat + + +# bit_length was defined in this module previously so keep it for backwards +# compatibility, will need to deprecate and remove it later +__all__ = ["bit_length", "bits2int", "bits2octets", "generate_k"] + + +def bits2int(data, qlen): + x = int(hexlify(data), 16) + l = len(data) * 8 + + if l > qlen: + return x >> (l - qlen) + return x + + +def bits2octets(data, order): + z1 = bits2int(data, bit_length(order)) + z2 = z1 - order + + if z2 < 0: + z2 = z1 + + return 
number_to_string_crop(z2, order) + + +# https://tools.ietf.org/html/rfc6979#section-3.2 +def generate_k(order, secexp, hash_func, data, retry_gen=0, extra_entropy=b""): + """ + order - order of the DSA generator used in the signature + secexp - secure exponent (private key) in numeric form + hash_func - reference to the same hash function used for generating + hash + data - hash in binary form of the signing data + retry_gen - int - how many good 'k' values to skip before returning + extra_entropy - extra added data in binary form as per section-3.6 of + rfc6979 + """ + + qlen = bit_length(order) + holen = hash_func().digest_size + rolen = (qlen + 7) // 8 + bx = ( + hmac_compat(number_to_string(secexp, order)), + hmac_compat(bits2octets(data, order)), + hmac_compat(extra_entropy), + ) + + # Step B + v = b"\x01" * holen + + # Step C + k = b"\x00" * holen + + # Step D + + k = hmac.new(k, digestmod=hash_func) + k.update(v + b"\x00") + for i in bx: + k.update(i) + k = k.digest() + + # Step E + v = hmac.new(k, v, hash_func).digest() + + # Step F + k = hmac.new(k, digestmod=hash_func) + k.update(v + b"\x01") + for i in bx: + k.update(i) + k = k.digest() + + # Step G + v = hmac.new(k, v, hash_func).digest() + + # Step H + while True: + # Step H1 + t = b"" + + # Step H2 + while len(t) < rolen: + v = hmac.new(k, v, hash_func).digest() + t += v + + # Step H3 + secret = bits2int(t, qlen) + + if 1 <= secret < order: + if retry_gen <= 0: + return secret + retry_gen -= 1 + + k = hmac.new(k, v + b"\x00", hash_func).digest() + v = hmac.new(k, v, hash_func).digest() diff --git a/venv/lib/python3.10/site-packages/ecdsa/test_curves.py b/venv/lib/python3.10/site-packages/ecdsa/test_curves.py new file mode 100644 index 0000000..69990c9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/test_curves.py @@ -0,0 +1,310 @@ +try: + import unittest2 as unittest +except ImportError: + import unittest + +import base64 +import pytest +from .curves import Curve, NIST256p, curves, UnknownCurveError, PRIME_FIELD_OID +from .ellipticcurve import CurveFp, PointJacobi +from . 
import der +from .util import number_to_string + + +class TestParameterEncoding(unittest.TestCase): + @classmethod + def setUpClass(cls): + # minimal, but with cofactor (excludes seed when compared to + # OpenSSL output) + cls.base64_params = ( + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=" + ) + + def test_from_pem(self): + pem_params = ( + "-----BEGIN EC PARAMETERS-----\n" + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" + "-----END EC PARAMETERS-----\n" + ) + curve = Curve.from_pem(pem_params) + + self.assertIs(curve, NIST256p) + + def test_from_pem_with_explicit_when_explicit_disabled(self): + pem_params = ( + "-----BEGIN EC PARAMETERS-----\n" + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" + "-----END EC PARAMETERS-----\n" + ) + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_pem(pem_params, ["named_curve"]) + + self.assertIn("explicit curve parameters not", str(e.exception)) + + def test_from_pem_with_named_curve_with_named_curve_disabled(self): + pem_params = ( + "-----BEGIN EC PARAMETERS-----\n" + "BggqhkjOPQMBBw==\n" + "-----END EC PARAMETERS-----\n" + ) + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_pem(pem_params, ["explicit"]) + + self.assertIn("named_curve curve parameters not", str(e.exception)) + + def test_from_pem_with_wrong_header(self): + pem_params = ( + "-----BEGIN PARAMETERS-----\n" + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" + "-----END PARAMETERS-----\n" + ) + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_pem(pem_params) + + self.assertIn("PARAMETERS PEM header", str(e.exception)) + + def test_to_pem(self): + pem_params = ( + b"-----BEGIN EC PARAMETERS-----\n" + b"BggqhkjOPQMBBw==\n" + b"-----END EC PARAMETERS-----\n" + ) + encoding = NIST256p.to_pem() + + self.assertEqual(pem_params, encoding) + + def test_compare_with_different_object(self): + self.assertNotEqual(NIST256p, 256) + + def test_named_curve_params_der(self): + encoded = NIST256p.to_der() + + # just the encoding of the NIST256p OID (prime256v1) + self.assertEqual(b"\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07", encoded) + + def test_verify_that_default_is_named_curve_der(self): + encoded_default = NIST256p.to_der() + encoded_named = NIST256p.to_der("named_curve") + + self.assertEqual(encoded_default, encoded_named) + + def test_encoding_to_explicit_params(self): + encoded = NIST256p.to_der("explicit") + + 
self.assertEqual(encoded, bytes(base64.b64decode(self.base64_params))) + + def test_encoding_to_explicit_compressed_params(self): + encoded = NIST256p.to_der("explicit", "compressed") + + compressed_base_point = ( + "MIHAAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP//////////" + "/////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6" + "k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEIQNrF9Hy4SxCR/i85uVjpEDydwN9" + "gS3rM6D0oTlF2JjClgIhAP////8AAAAA//////////+85vqtpxeehPO5ysL8YyVR" + "AgEB" + ) + + self.assertEqual( + encoded, bytes(base64.b64decode(compressed_base_point)) + ) + + def test_decoding_explicit_from_openssl(self): + # generated with openssl 1.1.1k using + # openssl ecparam -name P-256 -param_enc explicit -out /tmp/file.pem + p256_explicit = ( + "MIH3AgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP//////////" + "/////zBbBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6" + "k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsDFQDEnTYIhucEk2pmeOETnSa3gZ9+" + "kARBBGsX0fLhLEJH+Lzm5WOkQPJ3A32BLeszoPShOUXYmMKWT+NC4v4af5uO5+tK" + "fA+eFivOM1drMV7Oy7ZAaDe/UfUCIQD/////AAAAAP//////////vOb6racXnoTz" + "ucrC/GMlUQIBAQ==" + ) + + decoded = Curve.from_der(bytes(base64.b64decode(p256_explicit))) + + self.assertEqual(NIST256p, decoded) + + def test_decoding_well_known_from_explicit_params(self): + curve = Curve.from_der(bytes(base64.b64decode(self.base64_params))) + + self.assertIs(curve, NIST256p) + + def test_decoding_with_incorrect_valid_encodings(self): + with self.assertRaises(ValueError) as e: + Curve.from_der(b"", ["explicitCA"]) + + self.assertIn("Only named_curve", str(e.exception)) + + def test_compare_curves_with_different_generators(self): + curve_fp = CurveFp(23, 1, 7) + base_a = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) + base_b = PointJacobi(curve_fp, 1, 20, 1, 9, generator=True) + + curve_a = Curve("unknown", curve_fp, base_a, None) + curve_b = Curve("unknown", curve_fp, base_b, None) + + self.assertNotEqual(curve_a, curve_b) + + def test_default_encode_for_custom_curve(self): + curve_fp = CurveFp(23, 1, 7) + base_point = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) + + curve = Curve("unknown", curve_fp, base_point, None) + + encoded = curve.to_der() + + decoded = Curve.from_der(encoded) + + self.assertEqual(curve, decoded) + + expected = "MCECAQEwDAYHKoZIzj0BAQIBFzAGBAEBBAEHBAMEDQMCAQk=" + + self.assertEqual(encoded, bytes(base64.b64decode(expected))) + + def test_named_curve_encode_for_custom_curve(self): + curve_fp = CurveFp(23, 1, 7) + base_point = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) + + curve = Curve("unknown", curve_fp, base_point, None) + + with self.assertRaises(UnknownCurveError) as e: + curve.to_der("named_curve") + + self.assertIn("Can't encode curve", str(e.exception)) + + def test_try_decoding_binary_explicit(self): + sect113r1_explicit = ( + "MIGRAgEBMBwGByqGSM49AQIwEQIBcQYJKoZIzj0BAgMCAgEJMDkEDwAwiCUMpufH" + "/mSc6Fgg9wQPAOi+5NPiJgdEGIvg6ccjAxUAEOcjqxTWluZ2h1YVF1b+v4/LSakE" + "HwQAnXNhbzX0qxQH1zViwQ8ApSgwJ3lY7oTRMV7TGIYCDwEAAAAAAAAA2czsijnl" + "bwIBAg==" + ) + + with self.assertRaises(UnknownCurveError) as e: + Curve.from_der(base64.b64decode(sect113r1_explicit)) + + self.assertIn("Characteristic 2 curves unsupported", str(e.exception)) + + def test_decode_malformed_named_curve(self): + bad_der = der.encode_oid(*NIST256p.oid) + der.encode_integer(1) + + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Unexpected data after OID", str(e.exception)) + + def 
test_decode_malformed_explicit_garbage_after_ECParam(self): + bad_der = bytes( + base64.b64decode(self.base64_params) + ) + der.encode_integer(1) + + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Unexpected data after ECParameters", str(e.exception)) + + def test_decode_malformed_unknown_version_number(self): + bad_der = der.encode_sequence(der.encode_integer(2)) + + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Unknown parameter encoding format", str(e.exception)) + + def test_decode_malformed_unknown_field_type(self): + curve_p = NIST256p.curve.p() + bad_der = der.encode_sequence( + der.encode_integer(1), + der.encode_sequence( + der.encode_oid(1, 2, 3), der.encode_integer(curve_p) + ), + der.encode_sequence( + der.encode_octet_string( + number_to_string(NIST256p.curve.a() % curve_p, curve_p) + ), + der.encode_octet_string( + number_to_string(NIST256p.curve.b(), curve_p) + ), + ), + der.encode_octet_string( + NIST256p.generator.to_bytes("uncompressed") + ), + der.encode_integer(NIST256p.generator.order()), + ) + + with self.assertRaises(UnknownCurveError) as e: + Curve.from_der(bad_der) + + self.assertIn("Unknown field type: (1, 2, 3)", str(e.exception)) + + def test_decode_malformed_garbage_after_prime(self): + curve_p = NIST256p.curve.p() + bad_der = der.encode_sequence( + der.encode_integer(1), + der.encode_sequence( + der.encode_oid(*PRIME_FIELD_OID), + der.encode_integer(curve_p), + der.encode_integer(1), + ), + der.encode_sequence( + der.encode_octet_string( + number_to_string(NIST256p.curve.a() % curve_p, curve_p) + ), + der.encode_octet_string( + number_to_string(NIST256p.curve.b(), curve_p) + ), + ), + der.encode_octet_string( + NIST256p.generator.to_bytes("uncompressed") + ), + der.encode_integer(NIST256p.generator.order()), + ) + + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Prime-p element", str(e.exception)) + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_named(curve): + ret = Curve.from_der(curve.to_der("named_curve")) + + assert curve == ret + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_explicit(curve): + ret = Curve.from_der(curve.to_der("explicit")) + + assert curve == ret + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_default(curve): + ret = Curve.from_der(curve.to_der()) + + assert curve == ret + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_explicit_compressed(curve): + ret = Curve.from_der(curve.to_der("explicit", "compressed")) + + assert curve == ret diff --git a/venv/lib/python3.10/site-packages/ecdsa/test_der.py b/venv/lib/python3.10/site-packages/ecdsa/test_der.py new file mode 100644 index 0000000..5d00ba6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/test_der.py @@ -0,0 +1,476 @@ +# compatibility with Python 2.6, for that we need unittest2 package, +# which is not available on 3.3 or 3.4 +import warnings +from binascii import hexlify + +try: + import unittest2 as unittest +except ImportError: + import unittest +from six import b +import hypothesis.strategies as st +from hypothesis import given +import pytest +from ._compat import str_idx_as_int +from .curves import NIST256p, NIST224p +from .der import ( + remove_integer, + UnexpectedDER, + 
read_length, + encode_bitstring, + remove_bitstring, + remove_object, + encode_oid, + remove_constructed, + remove_octet_string, + remove_sequence, +) + + +class TestRemoveInteger(unittest.TestCase): + # DER requires the integers to be 0-padded only if they would be + # interpreted as negative, check if those errors are detected + def test_non_minimal_encoding(self): + with self.assertRaises(UnexpectedDER): + remove_integer(b("\x02\x02\x00\x01")) + + def test_negative_with_high_bit_set(self): + with self.assertRaises(UnexpectedDER): + remove_integer(b("\x02\x01\x80")) + + def test_minimal_with_high_bit_set(self): + val, rem = remove_integer(b("\x02\x02\x00\x80")) + + self.assertEqual(val, 0x80) + self.assertEqual(rem, b"") + + def test_two_zero_bytes_with_high_bit_set(self): + with self.assertRaises(UnexpectedDER): + remove_integer(b("\x02\x03\x00\x00\xff")) + + def test_zero_length_integer(self): + with self.assertRaises(UnexpectedDER): + remove_integer(b("\x02\x00")) + + def test_empty_string(self): + with self.assertRaises(UnexpectedDER): + remove_integer(b("")) + + def test_encoding_of_zero(self): + val, rem = remove_integer(b("\x02\x01\x00")) + + self.assertEqual(val, 0) + self.assertEqual(rem, b"") + + def test_encoding_of_127(self): + val, rem = remove_integer(b("\x02\x01\x7f")) + + self.assertEqual(val, 127) + self.assertEqual(rem, b"") + + def test_encoding_of_128(self): + val, rem = remove_integer(b("\x02\x02\x00\x80")) + + self.assertEqual(val, 128) + self.assertEqual(rem, b"") + + def test_wrong_tag(self): + with self.assertRaises(UnexpectedDER) as e: + remove_integer(b"\x01\x02\x00\x80") + + self.assertIn("wanted type 'integer'", str(e.exception)) + + def test_wrong_length(self): + with self.assertRaises(UnexpectedDER) as e: + remove_integer(b"\x02\x03\x00\x80") + + self.assertIn("Length longer", str(e.exception)) + + +class TestReadLength(unittest.TestCase): + # DER requires the lengths between 0 and 127 to be encoded using the short + # form and lengths above that encoded with minimal number of bytes + # necessary + def test_zero_length(self): + self.assertEqual((0, 1), read_length(b("\x00"))) + + def test_two_byte_zero_length(self): + with self.assertRaises(UnexpectedDER): + read_length(b("\x81\x00")) + + def test_two_byte_small_length(self): + with self.assertRaises(UnexpectedDER): + read_length(b("\x81\x7f")) + + def test_long_form_with_zero_length(self): + with self.assertRaises(UnexpectedDER): + read_length(b("\x80")) + + def test_smallest_two_byte_length(self): + self.assertEqual((128, 2), read_length(b("\x81\x80"))) + + def test_zero_padded_length(self): + with self.assertRaises(UnexpectedDER): + read_length(b("\x82\x00\x80")) + + def test_two_three_byte_length(self): + self.assertEqual((256, 3), read_length(b"\x82\x01\x00")) + + def test_empty_string(self): + with self.assertRaises(UnexpectedDER): + read_length(b("")) + + def test_length_overflow(self): + with self.assertRaises(UnexpectedDER): + read_length(b("\x83\x01\x00")) + + +class TestEncodeBitstring(unittest.TestCase): + # DER requires BIT STRINGS to include a number of padding bits in the + # encoded byte string, that padding must be between 0 and 7 + + def test_old_call_convention(self): + """This is the old way to use the function.""" + warnings.simplefilter("always") + with pytest.warns(DeprecationWarning) as warns: + der = encode_bitstring(b"\x00\xff") + + self.assertEqual(len(warns), 1) + self.assertIn( + "unused= needs to be specified", warns[0].message.args[0] + ) + + self.assertEqual(der, 
b"\x03\x02\x00\xff") + + def test_new_call_convention(self): + """This is how it should be called now.""" + warnings.simplefilter("always") + with pytest.warns(None) as warns: + der = encode_bitstring(b"\xff", 0) + + # verify that new call convention doesn't raise Warnings + self.assertEqual(len(warns), 0) + + self.assertEqual(der, b"\x03\x02\x00\xff") + + def test_implicit_unused_bits(self): + """ + Writing bit string with already included the number of unused bits. + """ + warnings.simplefilter("always") + with pytest.warns(None) as warns: + der = encode_bitstring(b"\x00\xff", None) + + # verify that new call convention doesn't raise Warnings + self.assertEqual(len(warns), 0) + + self.assertEqual(der, b"\x03\x02\x00\xff") + + def test_explicit_unused_bits(self): + der = encode_bitstring(b"\xff\xf0", 4) + + self.assertEqual(der, b"\x03\x03\x04\xff\xf0") + + def test_empty_string(self): + self.assertEqual(encode_bitstring(b"", 0), b"\x03\x01\x00") + + def test_invalid_unused_count(self): + with self.assertRaises(ValueError): + encode_bitstring(b"\xff\x00", 8) + + def test_invalid_unused_with_empty_string(self): + with self.assertRaises(ValueError): + encode_bitstring(b"", 1) + + def test_non_zero_padding_bits(self): + with self.assertRaises(ValueError): + encode_bitstring(b"\xff", 2) + + +class TestRemoveBitstring(unittest.TestCase): + def test_old_call_convention(self): + """This is the old way to call the function.""" + warnings.simplefilter("always") + with pytest.warns(DeprecationWarning) as warns: + bits, rest = remove_bitstring(b"\x03\x02\x00\xff") + + self.assertEqual(len(warns), 1) + self.assertIn( + "expect_unused= needs to be specified", warns[0].message.args[0] + ) + + self.assertEqual(bits, b"\x00\xff") + self.assertEqual(rest, b"") + + def test_new_call_convention(self): + warnings.simplefilter("always") + with pytest.warns(None) as warns: + bits, rest = remove_bitstring(b"\x03\x02\x00\xff", 0) + + self.assertEqual(len(warns), 0) + + self.assertEqual(bits, b"\xff") + self.assertEqual(rest, b"") + + def test_implicit_unexpected_unused(self): + warnings.simplefilter("always") + with pytest.warns(None) as warns: + bits, rest = remove_bitstring(b"\x03\x02\x00\xff", None) + + self.assertEqual(len(warns), 0) + + self.assertEqual(bits, (b"\xff", 0)) + self.assertEqual(rest, b"") + + def test_with_padding(self): + ret, rest = remove_bitstring(b"\x03\x02\x04\xf0", None) + + self.assertEqual(ret, (b"\xf0", 4)) + self.assertEqual(rest, b"") + + def test_not_a_bitstring(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x02\x02\x00\xff", None) + + def test_empty_encoding(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03\x00", None) + + def test_empty_string(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"", None) + + def test_no_length(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03", None) + + def test_unexpected_number_of_unused_bits(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03\x02\x00\xff", 1) + + def test_invalid_encoding_of_unused_bits(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03\x03\x08\xff\x00", None) + + def test_invalid_encoding_of_empty_string(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03\x01\x01", None) + + def test_invalid_padding_bits(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03\x02\x01\xff", None) + + +class TestStrIdxAsInt(unittest.TestCase): + def test_str(self): + 
self.assertEqual(115, str_idx_as_int("str", 0)) + + def test_bytes(self): + self.assertEqual(115, str_idx_as_int(b"str", 0)) + + def test_bytearray(self): + self.assertEqual(115, str_idx_as_int(bytearray(b"str"), 0)) + + +class TestEncodeOid(unittest.TestCase): + def test_pub_key_oid(self): + oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1) + self.assertEqual(hexlify(oid_ecPublicKey), b("06072a8648ce3d0201")) + + def test_nist224p_oid(self): + self.assertEqual(hexlify(NIST224p.encoded_oid), b("06052b81040021")) + + def test_nist256p_oid(self): + self.assertEqual( + hexlify(NIST256p.encoded_oid), b"06082a8648ce3d030107" + ) + + def test_large_second_subid(self): + # from X.690, section 8.19.5 + oid = encode_oid(2, 999, 3) + self.assertEqual(oid, b"\x06\x03\x88\x37\x03") + + def test_with_two_subids(self): + oid = encode_oid(2, 999) + self.assertEqual(oid, b"\x06\x02\x88\x37") + + def test_zero_zero(self): + oid = encode_oid(0, 0) + self.assertEqual(oid, b"\x06\x01\x00") + + def test_with_wrong_types(self): + with self.assertRaises((TypeError, AssertionError)): + encode_oid(0, None) + + def test_with_small_first_large_second(self): + with self.assertRaises(AssertionError): + encode_oid(1, 40) + + def test_small_first_max_second(self): + oid = encode_oid(1, 39) + self.assertEqual(oid, b"\x06\x01\x4f") + + def test_with_invalid_first(self): + with self.assertRaises(AssertionError): + encode_oid(3, 39) + + +class TestRemoveObject(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1) + + def test_pub_key_oid(self): + oid, rest = remove_object(self.oid_ecPublicKey) + self.assertEqual(rest, b"") + self.assertEqual(oid, (1, 2, 840, 10045, 2, 1)) + + def test_with_extra_bytes(self): + oid, rest = remove_object(self.oid_ecPublicKey + b"more") + self.assertEqual(rest, b"more") + self.assertEqual(oid, (1, 2, 840, 10045, 2, 1)) + + def test_with_large_second_subid(self): + # from X.690, section 8.19.5 + oid, rest = remove_object(b"\x06\x03\x88\x37\x03") + self.assertEqual(rest, b"") + self.assertEqual(oid, (2, 999, 3)) + + def test_with_padded_first_subid(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x02\x80\x00") + + def test_with_padded_second_subid(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x04\x88\x37\x80\x01") + + def test_with_missing_last_byte_of_multi_byte(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x03\x88\x37\x83") + + def test_with_two_subids(self): + oid, rest = remove_object(b"\x06\x02\x88\x37") + self.assertEqual(rest, b"") + self.assertEqual(oid, (2, 999)) + + def test_zero_zero(self): + oid, rest = remove_object(b"\x06\x01\x00") + self.assertEqual(rest, b"") + self.assertEqual(oid, (0, 0)) + + def test_empty_string(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"") + + def test_missing_length(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06") + + def test_empty_oid(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x00") + + def test_empty_oid_overflow(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x01") + + def test_with_wrong_type(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x04\x02\x88\x37") + + def test_with_too_long_length(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x03\x88\x37") + + +class TestRemoveConstructed(unittest.TestCase): + def test_simple(self): + data = b"\xa1\x02\xff\xaa" + + tag, body, rest = 
remove_constructed(data) + + self.assertEqual(tag, 0x01) + self.assertEqual(body, b"\xff\xaa") + self.assertEqual(rest, b"") + + def test_with_malformed_tag(self): + data = b"\x01\x02\xff\xaa" + + with self.assertRaises(UnexpectedDER) as e: + remove_constructed(data) + + self.assertIn("constructed tag", str(e.exception)) + + +class TestRemoveOctetString(unittest.TestCase): + def test_simple(self): + data = b"\x04\x03\xaa\xbb\xcc" + body, rest = remove_octet_string(data) + self.assertEqual(body, b"\xaa\xbb\xcc") + self.assertEqual(rest, b"") + + def test_with_malformed_tag(self): + data = b"\x03\x03\xaa\xbb\xcc" + with self.assertRaises(UnexpectedDER) as e: + remove_octet_string(data) + + self.assertIn("octetstring", str(e.exception)) + + +class TestRemoveSequence(unittest.TestCase): + def test_simple(self): + data = b"\x30\x02\xff\xaa" + body, rest = remove_sequence(data) + self.assertEqual(body, b"\xff\xaa") + self.assertEqual(rest, b"") + + def test_with_empty_string(self): + with self.assertRaises(UnexpectedDER) as e: + remove_sequence(b"") + + self.assertIn("Empty string", str(e.exception)) + + def test_with_wrong_tag(self): + data = b"\x20\x02\xff\xaa" + + with self.assertRaises(UnexpectedDER) as e: + remove_sequence(data) + + self.assertIn("wanted type 'sequence'", str(e.exception)) + + def test_with_wrong_length(self): + data = b"\x30\x03\xff\xaa" + + with self.assertRaises(UnexpectedDER) as e: + remove_sequence(data) + + self.assertIn("Length longer", str(e.exception)) + + +@st.composite +def st_oid(draw, max_value=2 ** 512, max_size=50): + """ + Hypothesis strategy that returns valid OBJECT IDENTIFIERs as tuples + + :param max_value: maximum value of any single sub-identifier + :param max_size: maximum length of the generated OID + """ + first = draw(st.integers(min_value=0, max_value=2)) + if first < 2: + second = draw(st.integers(min_value=0, max_value=39)) + else: + second = draw(st.integers(min_value=0, max_value=max_value)) + rest = draw( + st.lists( + st.integers(min_value=0, max_value=max_value), max_size=max_size + ) + ) + return (first, second) + tuple(rest) + + +@given(st_oid()) +def test_oids(ids): + encoded_oid = encode_oid(*ids) + decoded_oid, rest = remove_object(encoded_oid) + assert rest == b"" + assert decoded_oid == ids diff --git a/venv/lib/python3.10/site-packages/ecdsa/test_ecdh.py b/venv/lib/python3.10/site-packages/ecdsa/test_ecdh.py new file mode 100644 index 0000000..62d35c8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/test_ecdh.py @@ -0,0 +1,431 @@ +import os +import shutil +import subprocess +import pytest +from binascii import unhexlify + +try: + import unittest2 as unittest +except ImportError: + import unittest + +from .curves import ( + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + BRAINPOOLP160r1, +) +from .curves import curves +from .ecdh import ( + ECDH, + InvalidCurveError, + InvalidSharedSecretError, + NoKeyError, + NoCurveError, +) +from .keys import SigningKey, VerifyingKey + + +@pytest.mark.parametrize( + "vcurve", curves, ids=[curve.name for curve in curves] +) +def test_ecdh_each(vcurve): + ecdh1 = ECDH(curve=vcurve) + ecdh2 = ECDH(curve=vcurve) + + ecdh2.generate_private_key() + ecdh1.load_received_public_key(ecdh2.get_public_key()) + ecdh2.load_received_public_key(ecdh1.generate_private_key()) + + secret1 = ecdh1.generate_sharedsecret_bytes() + secret2 = ecdh2.generate_sharedsecret_bytes() + assert secret1 == secret2 + + +def test_ecdh_both_keys_present(): + key1 = SigningKey.generate(BRAINPOOLP160r1) + key2 = 
SigningKey.generate(BRAINPOOLP160r1) + + ecdh1 = ECDH(BRAINPOOLP160r1, key1, key2.verifying_key) + ecdh2 = ECDH(private_key=key2, public_key=key1.verifying_key) + + secret1 = ecdh1.generate_sharedsecret_bytes() + secret2 = ecdh2.generate_sharedsecret_bytes() + + assert secret1 == secret2 + + +def test_ecdh_no_public_key(): + ecdh1 = ECDH(curve=NIST192p) + + with pytest.raises(NoKeyError): + ecdh1.generate_sharedsecret_bytes() + + ecdh1.generate_private_key() + + with pytest.raises(NoKeyError): + ecdh1.generate_sharedsecret_bytes() + + +class TestECDH(unittest.TestCase): + def test_load_key_from_wrong_curve(self): + ecdh1 = ECDH() + ecdh1.set_curve(NIST192p) + + key1 = SigningKey.generate(BRAINPOOLP160r1) + + with self.assertRaises(InvalidCurveError) as e: + ecdh1.load_private_key(key1) + + self.assertIn("Curve mismatch", str(e.exception)) + + def test_generate_without_curve(self): + ecdh1 = ECDH() + + with self.assertRaises(NoCurveError) as e: + ecdh1.generate_private_key() + + self.assertIn("Curve must be set", str(e.exception)) + + def test_load_bytes_without_curve_set(self): + ecdh1 = ECDH() + + with self.assertRaises(NoCurveError) as e: + ecdh1.load_private_key_bytes(b"\x01" * 32) + + self.assertIn("Curve must be set", str(e.exception)) + + def test_set_curve_from_received_public_key(self): + ecdh1 = ECDH() + + key1 = SigningKey.generate(BRAINPOOLP160r1) + + ecdh1.load_received_public_key(key1.verifying_key) + + self.assertEqual(ecdh1.curve, BRAINPOOLP160r1) + + +def test_ecdh_wrong_public_key_curve(): + ecdh1 = ECDH(curve=NIST192p) + ecdh1.generate_private_key() + ecdh2 = ECDH(curve=NIST256p) + ecdh2.generate_private_key() + + with pytest.raises(InvalidCurveError): + ecdh1.load_received_public_key(ecdh2.get_public_key()) + + with pytest.raises(InvalidCurveError): + ecdh2.load_received_public_key(ecdh1.get_public_key()) + + ecdh1.public_key = ecdh2.get_public_key() + ecdh2.public_key = ecdh1.get_public_key() + + with pytest.raises(InvalidCurveError): + ecdh1.generate_sharedsecret_bytes() + + with pytest.raises(InvalidCurveError): + ecdh2.generate_sharedsecret_bytes() + + +def test_ecdh_invalid_shared_secret_curve(): + ecdh1 = ECDH(curve=NIST256p) + ecdh1.generate_private_key() + + ecdh1.load_received_public_key( + SigningKey.generate(NIST256p).get_verifying_key() + ) + + ecdh1.private_key.privkey.secret_multiplier = ecdh1.private_key.curve.order + + with pytest.raises(InvalidSharedSecretError): + ecdh1.generate_sharedsecret_bytes() + + +# https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp192r1.txt +# https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp256r1.txt +# https://github.com/coruus/nist-testvectors/blob/master/csrc.nist.gov/groups/STM/cavp/documents/components/ecccdhtestvectors/KAS_ECC_CDH_PrimitiveTest.txt +@pytest.mark.parametrize( + "curve,privatekey,pubkey,secret", + [ + pytest.param( + NIST192p, + "f17d3fea367b74d340851ca4270dcb24c271f445bed9d527", + "42ea6dd9969dd2a61fea1aac7f8e98edcc896c6e55857cc0" + "dfbe5d7c61fac88b11811bde328e8a0d12bf01a9d204b523", + "803d8ab2e5b6e6fca715737c3a82f7ce3c783124f6d51cd0", + id="NIST192p-1", + ), + pytest.param( + NIST192p, + "56e853349d96fe4c442448dacb7cf92bb7a95dcf574a9bd5", + "deb5712fa027ac8d2f22c455ccb73a91e17b6512b5e030e7" + "7e2690a02cc9b28708431a29fb54b87b1f0c14e011ac2125", + "c208847568b98835d7312cef1f97f7aa298283152313c29d", + id="NIST192p-2", + ), + pytest.param( + NIST192p, + "c6ef61fe12e80bf56f2d3f7d0bb757394519906d55500949", + "4edaa8efc5a0f40f843663ec5815e7762dddc008e663c20f" + 
"0a9f8dc67a3e60ef6d64b522185d03df1fc0adfd42478279", + "87229107047a3b611920d6e3b2c0c89bea4f49412260b8dd", + id="NIST192p-3", + ), + pytest.param( + NIST192p, + "e6747b9c23ba7044f38ff7e62c35e4038920f5a0163d3cda", + "8887c276edeed3e9e866b46d58d895c73fbd80b63e382e88" + "04c5097ba6645e16206cfb70f7052655947dd44a17f1f9d5", + "eec0bed8fc55e1feddc82158fd6dc0d48a4d796aaf47d46c", + id="NIST192p-4", + ), + pytest.param( + NIST192p, + "beabedd0154a1afcfc85d52181c10f5eb47adc51f655047d", + "0d045f30254adc1fcefa8a5b1f31bf4e739dd327cd18d594" + "542c314e41427c08278a08ce8d7305f3b5b849c72d8aff73", + "716e743b1b37a2cd8479f0a3d5a74c10ba2599be18d7e2f4", + id="NIST192p-5", + ), + pytest.param( + NIST192p, + "cf70354226667321d6e2baf40999e2fd74c7a0f793fa8699", + "fb35ca20d2e96665c51b98e8f6eb3d79113508d8bccd4516" + "368eec0d5bfb847721df6aaff0e5d48c444f74bf9cd8a5a7", + "f67053b934459985a315cb017bf0302891798d45d0e19508", + id="NIST192p-6", + ), + pytest.param( + NIST224p, + "8346a60fc6f293ca5a0d2af68ba71d1dd389e5e40837942df3e43cbd", + "af33cd0629bc7e996320a3f40368f74de8704fa37b8fab69abaae280" + "882092ccbba7930f419a8a4f9bb16978bbc3838729992559a6f2e2d7", + "7d96f9a3bd3c05cf5cc37feb8b9d5209d5c2597464dec3e9983743e8", + id="NIST224p", + ), + pytest.param( + NIST256p, + "7d7dc5f71eb29ddaf80d6214632eeae03d9058af1fb6d22ed80badb62bc1a534", + "700c48f77f56584c5cc632ca65640db91b6bacce3a4df6b42ce7cc838833d287" + "db71e509e3fd9b060ddb20ba5c51dcc5948d46fbf640dfe0441782cab85fa4ac", + "46fc62106420ff012e54a434fbdd2d25ccc5852060561e68040dd7778997bd7b", + id="NIST256p-1", + ), + pytest.param( + NIST256p, + "38f65d6dce47676044d58ce5139582d568f64bb16098d179dbab07741dd5caf5", + "809f04289c64348c01515eb03d5ce7ac1a8cb9498f5caa50197e58d43a86a7ae" + "b29d84e811197f25eba8f5194092cb6ff440e26d4421011372461f579271cda3", + "057d636096cb80b67a8c038c890e887d1adfa4195e9b3ce241c8a778c59cda67", + id="NIST256p-2", + ), + pytest.param( + NIST256p, + "1accfaf1b97712b85a6f54b148985a1bdc4c9bec0bd258cad4b3d603f49f32c8", + "a2339c12d4a03c33546de533268b4ad667debf458b464d77443636440ee7fec3" + "ef48a3ab26e20220bcda2c1851076839dae88eae962869a497bf73cb66faf536", + "2d457b78b4614132477618a5b077965ec90730a8c81a1c75d6d4ec68005d67ec", + id="NIST256p-3", + ), + pytest.param( + NIST256p, + "207c43a79bfee03db6f4b944f53d2fb76cc49ef1c9c4d34d51b6c65c4db6932d", + "df3989b9fa55495719b3cf46dccd28b5153f7808191dd518eff0c3cff2b705ed" + "422294ff46003429d739a33206c8752552c8ba54a270defc06e221e0feaf6ac4", + "96441259534b80f6aee3d287a6bb17b5094dd4277d9e294f8fe73e48bf2a0024", + id="NIST256p-4", + ), + pytest.param( + NIST256p, + "59137e38152350b195c9718d39673d519838055ad908dd4757152fd8255c09bf", + "41192d2813e79561e6a1d6f53c8bc1a433a199c835e141b05a74a97b0faeb922" + "1af98cc45e98a7e041b01cf35f462b7562281351c8ebf3ffa02e33a0722a1328", + "19d44c8d63e8e8dd12c22a87b8cd4ece27acdde04dbf47f7f27537a6999a8e62", + id="NIST256p-5", + ), + pytest.param( + NIST256p, + "f5f8e0174610a661277979b58ce5c90fee6c9b3bb346a90a7196255e40b132ef", + "33e82092a0f1fb38f5649d5867fba28b503172b7035574bf8e5b7100a3052792" + "f2cf6b601e0a05945e335550bf648d782f46186c772c0f20d3cd0d6b8ca14b2f", + "664e45d5bba4ac931cd65d52017e4be9b19a515f669bea4703542a2c525cd3d3", + id="NIST256p-6", + ), + pytest.param( + NIST384p, + "3cc3122a68f0d95027ad38c067916ba0eb8c38894d22e1b1" + "5618b6818a661774ad463b205da88cf699ab4d43c9cf98a1", + "a7c76b970c3b5fe8b05d2838ae04ab47697b9eaf52e76459" + "2efda27fe7513272734466b400091adbf2d68c58e0c50066" + "ac68f19f2e1cb879aed43a9969b91a0839c4c38a49749b66" + 
"1efedf243451915ed0905a32b060992b468c64766fc8437a", + "5f9d29dc5e31a163060356213669c8ce132e22f57c9a04f4" + "0ba7fcead493b457e5621e766c40a2e3d4d6a04b25e533f1", + id="NIST384p", + ), + pytest.param( + NIST521p, + "017eecc07ab4b329068fba65e56a1f8890aa935e57134ae0ffcce802735151f4ea" + "c6564f6ee9974c5e6887a1fefee5743ae2241bfeb95d5ce31ddcb6f9edb4d6fc47", + "00685a48e86c79f0f0875f7bc18d25eb5fc8c0b07e5da4f4370f3a949034085433" + "4b1e1b87fa395464c60626124a4e70d0f785601d37c09870ebf176666877a2046d" + "01ba52c56fc8776d9e8f5db4f0cc27636d0b741bbe05400697942e80b739884a83" + "bde99e0f6716939e632bc8986fa18dccd443a348b6c3e522497955a4f3c302f676", + "005fc70477c3e63bc3954bd0df3ea0d1f41ee21746ed95fc5e1fdf90930d5e1366" + "72d72cc770742d1711c3c3a4c334a0ad9759436a4d3c5bf6e74b9578fac148c831", + id="NIST521p", + ), + ], +) +def test_ecdh_NIST(curve, privatekey, pubkey, secret): + ecdh = ECDH(curve=curve) + ecdh.load_private_key_bytes(unhexlify(privatekey)) + ecdh.load_received_public_key_bytes(unhexlify(pubkey)) + + sharedsecret = ecdh.generate_sharedsecret_bytes() + + assert sharedsecret == unhexlify(secret) + + +pem_local_private_key = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" + "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" + "bA==\n" + "-----END EC PRIVATE KEY-----\n" +) +der_local_private_key = ( + "305f02010104185ec8420bd6ef9252a942e989043ca29f561fa525770eb1c5a00a06082a864" + "8ce3d030101a13403320004b88177d084ef17f5e45639408028360f9f59b4a4d7264e62da06" + "51dce47a35a4c5b45cf51593423a8b557b9c2099f36c" +) +pem_remote_public_key = ( + "-----BEGIN PUBLIC KEY-----\n" + "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n" + "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n" + "-----END PUBLIC KEY-----\n" +) +der_remote_public_key = ( + "3049301306072a8648ce3d020106082a8648ce3d03010103320004b88177d084ef17f5e4563" + "9408028360f9f59b4a4d7264e62da0651dce47a35a4c5b45cf51593423a8b557b9c2099f36c" +) +gshared_secret = "8f457e34982478d1c34b9cd2d0c15911b72dd60d869e2cea" + + +def test_ecdh_pem(): + ecdh = ECDH() + ecdh.load_private_key_pem(pem_local_private_key) + ecdh.load_received_public_key_pem(pem_remote_public_key) + + sharedsecret = ecdh.generate_sharedsecret_bytes() + + assert sharedsecret == unhexlify(gshared_secret) + + +def test_ecdh_der(): + ecdh = ECDH() + ecdh.load_private_key_der(unhexlify(der_local_private_key)) + ecdh.load_received_public_key_der(unhexlify(der_remote_public_key)) + + sharedsecret = ecdh.generate_sharedsecret_bytes() + + assert sharedsecret == unhexlify(gshared_secret) + + +# Exception classes used by run_openssl. 
+class RunOpenSslError(Exception):
+    pass
+
+
+def run_openssl(cmd):
+    OPENSSL = "openssl"
+    p = subprocess.Popen(
+        [OPENSSL] + cmd.split(),
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+    )
+    stdout, ignored = p.communicate()
+    if p.returncode != 0:
+        raise RunOpenSslError(
+            "cmd '%s %s' failed: rc=%s, stdout/err was %s"
+            % (OPENSSL, cmd, p.returncode, stdout)
+        )
+    return stdout.decode()
+
+
+OPENSSL_SUPPORTED_CURVES = set(
+    c.split(":")[0].strip()
+    for c in run_openssl("ecparam -list_curves").split("\n")
+)
+
+
+@pytest.mark.parametrize(
+    "vcurve", curves, ids=[curve.name for curve in curves]
+)
+def test_ecdh_with_openssl(vcurve):
+    assert vcurve.openssl_name
+
+    if vcurve.openssl_name not in OPENSSL_SUPPORTED_CURVES:
+        pytest.skip("system openssl does not support " + vcurve.openssl_name)
+
+    try:
+        hlp = run_openssl("pkeyutl -help")
+        # str.find() returns -1 when "-derive" is absent from the help output
+        if hlp.find("-derive") == -1:  # pragma: no cover
+            pytest.skip("system openssl does not support `pkeyutl -derive`")
+    except RunOpenSslError:  # pragma: no cover
+        pytest.skip("system openssl could not be executed")
+
+    if os.path.isdir("t"):  # pragma: no branch
+        shutil.rmtree("t")
+    os.mkdir("t")
+    run_openssl(
+        "ecparam -name %s -genkey -out t/privkey1.pem" % vcurve.openssl_name
+    )
+    run_openssl(
+        "ecparam -name %s -genkey -out t/privkey2.pem" % vcurve.openssl_name
+    )
+    run_openssl("ec -in t/privkey1.pem -pubout -out t/pubkey1.pem")
+
+    ecdh1 = ECDH(curve=vcurve)
+    ecdh2 = ECDH(curve=vcurve)
+    with open("t/privkey1.pem") as e:
+        key = e.read()
+    ecdh1.load_private_key_pem(key)
+    with open("t/privkey2.pem") as e:
+        key = e.read()
+    ecdh2.load_private_key_pem(key)
+
+    with open("t/pubkey1.pem") as e:
+        key = e.read()
+    vk1 = VerifyingKey.from_pem(key)
+    assert vk1.to_string() == ecdh1.get_public_key().to_string()
+    vk2 = ecdh2.get_public_key()
+    with open("t/pubkey2.pem", "wb") as e:
+        e.write(vk2.to_pem())
+
+    ecdh1.load_received_public_key(vk2)
+    ecdh2.load_received_public_key(vk1)
+    secret1 = ecdh1.generate_sharedsecret_bytes()
+    secret2 = ecdh2.generate_sharedsecret_bytes()
+
+    assert secret1 == secret2
+
+    run_openssl(
+        "pkeyutl -derive -inkey t/privkey1.pem -peerkey t/pubkey2.pem -out t/secret1"
+    )
+    run_openssl(
+        "pkeyutl -derive -inkey t/privkey2.pem -peerkey t/pubkey1.pem -out t/secret2"
+    )
+
+    # compare both OpenSSL-derived secrets with each other and with ours
+    with open("t/secret1", "rb") as e:
+        ssl_secret1 = e.read()
+    with open("t/secret2", "rb") as e:
+        ssl_secret2 = e.read()
+
+    assert len(ssl_secret1) == vk1.curve.verifying_key_length // 2
+    assert len(secret1) == vk1.curve.verifying_key_length // 2
+
+    assert ssl_secret1 == ssl_secret2
+    assert secret1 == ssl_secret1
diff --git a/venv/lib/python3.10/site-packages/ecdsa/test_ecdsa.py b/venv/lib/python3.10/site-packages/ecdsa/test_ecdsa.py
new file mode 100644
index 0000000..dbc4a6e
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/ecdsa/test_ecdsa.py
@@ -0,0 +1,661 @@
+from __future__ import print_function
+import sys
+import hypothesis.strategies as st
+from hypothesis import given, settings, note, example
+
+try:
+    import unittest2 as unittest
+except ImportError:
+    import unittest
+import pytest
+from .ecdsa import (
+    Private_key,
+    Public_key,
+    Signature,
+    generator_192,
+    digest_integer,
+    ellipticcurve,
+    point_is_valid,
+    generator_224,
+    generator_256,
+    generator_384,
+    generator_521,
+    generator_secp256k1,
+    curve_192,
+    InvalidPointError,
+    curve_112r2,
+    generator_112r2,
+    int_to_string,
+)
+
+
+HYP_SETTINGS = {}
+# old hypothesis doesn't have the "deadline" setting
+if sys.version_info > (2, 7):  # pragma: no branch
+ # SEC521p is slow, allow long execution for it + HYP_SETTINGS["deadline"] = 5000 + + +class TestP192FromX9_62(unittest.TestCase): + """Check test vectors from X9.62""" + + @classmethod + def setUpClass(cls): + cls.d = 651056770906015076056810763456358567190100156695615665659 + cls.Q = cls.d * generator_192 + cls.k = 6140507067065001063065065565667405560006161556565665656654 + cls.R = cls.k * generator_192 + + cls.msg = 968236873715988614170569073515315707566766479517 + cls.pubk = Public_key(generator_192, generator_192 * cls.d) + cls.privk = Private_key(cls.pubk, cls.d) + cls.sig = cls.privk.sign(cls.msg, cls.k) + + def test_point_multiplication(self): + assert self.Q.x() == 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5 + + def test_point_multiplication_2(self): + assert self.R.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD + assert self.R.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 + + def test_mult_and_addition(self): + u1 = 2563697409189434185194736134579731015366492496392189760599 + u2 = 6266643813348617967186477710235785849136406323338782220568 + temp = u1 * generator_192 + u2 * self.Q + assert temp.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD + assert temp.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 + + def test_signature(self): + r, s = self.sig.r, self.sig.s + assert r == 3342403536405981729393488334694600415596881826869351677613 + assert s == 5735822328888155254683894997897571951568553642892029982342 + + def test_verification(self): + assert self.pubk.verifies(self.msg, self.sig) + + def test_rejection(self): + assert not self.pubk.verifies(self.msg - 1, self.sig) + + +class TestPublicKey(unittest.TestCase): + def test_equality_public_keys(self): + gen = generator_192 + x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + pub_key1 = Public_key(gen, point) + pub_key2 = Public_key(gen, point) + self.assertEqual(pub_key1, pub_key2) + + def test_inequality_public_key(self): + gen = generator_192 + x1 = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y1 = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point1 = ellipticcurve.Point(gen.curve(), x1, y1) + + x2 = 0x6A223D00BD22C52833409A163E057E5B5DA1DEF2A197DD15 + y2 = 0x7B482604199367F1F303F9EF627F922F97023E90EAE08ABF + point2 = ellipticcurve.Point(gen.curve(), x2, y2) + + pub_key1 = Public_key(gen, point1) + pub_key2 = Public_key(gen, point2) + self.assertNotEqual(pub_key1, pub_key2) + + def test_inequality_different_curves(self): + gen = generator_192 + x1 = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y1 = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point1 = ellipticcurve.Point(gen.curve(), x1, y1) + + x2 = 0x722BA0FB6B8FC8898A4C6AB49E66 + y2 = 0x2B7344BB57A7ABC8CA0F1A398C7D + point2 = ellipticcurve.Point(generator_112r2.curve(), x2, y2) + + pub_key1 = Public_key(gen, point1) + pub_key2 = Public_key(generator_112r2, point2) + self.assertNotEqual(pub_key1, pub_key2) + + def test_inequality_public_key_not_implemented(self): + gen = generator_192 + x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + pub_key = Public_key(gen, point) + self.assertNotEqual(pub_key, None) + + def test_public_key_with_generator_without_order(self): + gen = ellipticcurve.PointJacobi( + generator_192.curve(), generator_192.x(), generator_192.y(), 1 + ) + + x 
= 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + + with self.assertRaises(InvalidPointError) as e: + Public_key(gen, point) + + self.assertIn("Generator point must have order", str(e.exception)) + + def test_public_point_on_curve_not_scalar_multiple_of_base_point(self): + x = 2 + y = 0xBE6AA4938EF7CFE6FE29595B6B00 + # we need a curve with cofactor != 1 + point = ellipticcurve.PointJacobi(curve_112r2, x, y, 1) + + self.assertTrue(curve_112r2.contains_point(x, y)) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_112r2, point) + + self.assertIn("Generator point order", str(e.exception)) + + def test_point_is_valid_with_not_scalar_multiple_of_base_point(self): + x = 2 + y = 0xBE6AA4938EF7CFE6FE29595B6B00 + + self.assertFalse(point_is_valid(generator_112r2, x, y)) + + # the tests to verify the extensiveness of tests in ecdsa.ecdsa + # if PointJacobi gets modified to calculate the x and y mod p the tests + # below will need to use a fake/mock object + def test_invalid_point_x_negative(self): + pt = ellipticcurve.PointJacobi(curve_192, -1, 0, 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + def test_invalid_point_x_equal_p(self): + pt = ellipticcurve.PointJacobi(curve_192, curve_192.p(), 0, 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + def test_invalid_point_y_negative(self): + pt = ellipticcurve.PointJacobi(curve_192, 0, -1, 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + def test_invalid_point_y_equal_p(self): + pt = ellipticcurve.PointJacobi(curve_192, 0, curve_192.p(), 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + +class TestPublicKeyVerifies(unittest.TestCase): + # test all the different ways that a signature can be publicly invalid + @classmethod + def setUpClass(cls): + gen = generator_192 + x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + + cls.pub_key = Public_key(gen, point) + + def test_sig_with_r_zero(self): + sig = Signature(0, 1) + + self.assertFalse(self.pub_key.verifies(1, sig)) + + def test_sig_with_r_order(self): + sig = Signature(generator_192.order(), 1) + + self.assertFalse(self.pub_key.verifies(1, sig)) + + def test_sig_with_s_zero(self): + sig = Signature(1, 0) + + self.assertFalse(self.pub_key.verifies(1, sig)) + + def test_sig_with_s_order(self): + sig = Signature(1, generator_192.order()) + + self.assertFalse(self.pub_key.verifies(1, sig)) + + +class TestPrivateKey(unittest.TestCase): + @classmethod + def setUpClass(cls): + gen = generator_192 + x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + cls.pub_key = Public_key(gen, point) + + def test_equality_private_keys(self): + pr_key1 = Private_key(self.pub_key, 100) + pr_key2 = Private_key(self.pub_key, 100) + self.assertEqual(pr_key1, pr_key2) + + def test_inequality_private_keys(self): + pr_key1 = Private_key(self.pub_key, 100) 
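+        # same public part, different secret multiplier, so the keys must differ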
+ pr_key2 = Private_key(self.pub_key, 200) + self.assertNotEqual(pr_key1, pr_key2) + + def test_inequality_private_keys_not_implemented(self): + pr_key = Private_key(self.pub_key, 100) + self.assertNotEqual(pr_key, None) + + +# Testing point validity, as per ECDSAVS.pdf B.2.2: +P192_POINTS = [ + ( + generator_192, + 0xCD6D0F029A023E9AACA429615B8F577ABEE685D8257CC83A, + 0x00019C410987680E9FB6C0B6ECC01D9A2647C8BAE27721BACDFC, + False, + ), + ( + generator_192, + 0x00017F2FCE203639E9EAF9FB50B81FC32776B30E3B02AF16C73B, + 0x95DA95C5E72DD48E229D4748D4EEE658A9A54111B23B2ADB, + False, + ), + ( + generator_192, + 0x4F77F8BC7FCCBADD5760F4938746D5F253EE2168C1CF2792, + 0x000147156FF824D131629739817EDB197717C41AAB5C2A70F0F6, + False, + ), + ( + generator_192, + 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6, + 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F, + True, + ), + ( + generator_192, + 0xCDF56C1AA3D8AFC53C521ADF3FFB96734A6A630A4A5B5A70, + 0x97C1C44A5FB229007B5EC5D25F7413D170068FFD023CAA4E, + True, + ), + ( + generator_192, + 0x89009C0DC361C81E99280C8E91DF578DF88CDF4B0CDEDCED, + 0x27BE44A529B7513E727251F128B34262A0FD4D8EC82377B9, + True, + ), + ( + generator_192, + 0x6A223D00BD22C52833409A163E057E5B5DA1DEF2A197DD15, + 0x7B482604199367F1F303F9EF627F922F97023E90EAE08ABF, + True, + ), + ( + generator_192, + 0x6DCCBDE75C0948C98DAB32EA0BC59FE125CF0FB1A3798EDA, + 0x0001171A3E0FA60CF3096F4E116B556198DE430E1FBD330C8835, + False, + ), + ( + generator_192, + 0xD266B39E1F491FC4ACBBBC7D098430931CFA66D55015AF12, + 0x193782EB909E391A3148B7764E6B234AA94E48D30A16DBB2, + False, + ), + ( + generator_192, + 0x9D6DDBCD439BAA0C6B80A654091680E462A7D1D3F1FFEB43, + 0x6AD8EFC4D133CCF167C44EB4691C80ABFFB9F82B932B8CAA, + False, + ), + ( + generator_192, + 0x146479D944E6BDA87E5B35818AA666A4C998A71F4E95EDBC, + 0xA86D6FE62BC8FBD88139693F842635F687F132255858E7F6, + False, + ), + ( + generator_192, + 0xE594D4A598046F3598243F50FD2C7BD7D380EDB055802253, + 0x509014C0C4D6B536E3CA750EC09066AF39B4C8616A53A923, + False, + ), +] + + +@pytest.mark.parametrize("generator,x,y,expected", P192_POINTS) +def test_point_validity(generator, x, y, expected): + """ + `generator` defines the curve; is `(x, y)` a point on + this curve? `expected` is True if the right answer is Yes. 
+ """ + assert point_is_valid(generator, x, y) == expected + + +# Trying signature-verification tests from ECDSAVS.pdf B.2.4: +CURVE_192_KATS = [ + ( + generator_192, + int( + "0x84ce72aa8699df436059f052ac51b6398d2511e49631bcb7e71f89c499b9ee" + "425dfbc13a5f6d408471b054f2655617cbbaf7937b7c80cd8865cf02c8487d30" + "d2b0fbd8b2c4e102e16d828374bbc47b93852f212d5043c3ea720f086178ff79" + "8cc4f63f787b9c2e419efa033e7644ea7936f54462dc21a6c4580725f7f0e7d1" + "58", + 16, + ), + 0xD9DBFB332AA8E5FF091E8CE535857C37C73F6250FFB2E7AC, + 0x282102E364FEDED3AD15DDF968F88D8321AA268DD483EBC4, + 0x64DCA58A20787C488D11D6DD96313F1B766F2D8EFE122916, + 0x1ECBA28141E84AB4ECAD92F56720E2CC83EB3D22DEC72479, + True, + ), + ( + generator_192, + int( + "0x94bb5bacd5f8ea765810024db87f4224ad71362a3c28284b2b9f39fab86db1" + "2e8beb94aae899768229be8fdb6c4f12f28912bb604703a79ccff769c1607f5a" + "91450f30ba0460d359d9126cbd6296be6d9c4bb96c0ee74cbb44197c207f6db3" + "26ab6f5a659113a9034e54be7b041ced9dcf6458d7fb9cbfb2744d999f7dfd63" + "f4", + 16, + ), + 0x3E53EF8D3112AF3285C0E74842090712CD324832D4277AE7, + 0xCC75F8952D30AEC2CBB719FC6AA9934590B5D0FF5A83ADB7, + 0x8285261607283BA18F335026130BAB31840DCFD9C3E555AF, + 0x356D89E1B04541AFC9704A45E9C535CE4A50929E33D7E06C, + True, + ), + ( + generator_192, + int( + "0xf6227a8eeb34afed1621dcc89a91d72ea212cb2f476839d9b4243c66877911" + "b37b4ad6f4448792a7bbba76c63bdd63414b6facab7dc71c3396a73bd7ee14cd" + "d41a659c61c99b779cecf07bc51ab391aa3252386242b9853ea7da67fd768d30" + "3f1b9b513d401565b6f1eb722dfdb96b519fe4f9bd5de67ae131e64b40e78c42" + "dd", + 16, + ), + 0x16335DBE95F8E8254A4E04575D736BEFB258B8657F773CB7, + 0x421B13379C59BC9DCE38A1099CA79BBD06D647C7F6242336, + 0x4141BD5D64EA36C5B0BD21EF28C02DA216ED9D04522B1E91, + 0x159A6AA852BCC579E821B7BB0994C0861FB08280C38DAA09, + False, + ), + ( + generator_192, + int( + "0x16b5f93afd0d02246f662761ed8e0dd9504681ed02a253006eb36736b56309" + "7ba39f81c8e1bce7a16c1339e345efabbc6baa3efb0612948ae51103382a8ee8" + "bc448e3ef71e9f6f7a9676694831d7f5dd0db5446f179bcb737d4a526367a447" + "bfe2c857521c7f40b6d7d7e01a180d92431fb0bbd29c04a0c420a57b3ed26ccd" + "8a", + 16, + ), + 0xFD14CDF1607F5EFB7B1793037B15BDF4BAA6F7C16341AB0B, + 0x83FA0795CC6C4795B9016DAC928FD6BAC32F3229A96312C4, + 0x8DFDB832951E0167C5D762A473C0416C5C15BC1195667DC1, + 0x1720288A2DC13FA1EC78F763F8FE2FF7354A7E6FDDE44520, + False, + ), + ( + generator_192, + int( + "0x08a2024b61b79d260e3bb43ef15659aec89e5b560199bc82cf7c65c77d3919" + "2e03b9a895d766655105edd9188242b91fbde4167f7862d4ddd61e5d4ab55196" + "683d4f13ceb90d87aea6e07eb50a874e33086c4a7cb0273a8e1c4408f4b846bc" + "eae1ebaac1b2b2ea851a9b09de322efe34cebe601653efd6ddc876ce8c2f2072" + "fb", + 16, + ), + 0x674F941DC1A1F8B763C9334D726172D527B90CA324DB8828, + 0x65ADFA32E8B236CB33A3E84CF59BFB9417AE7E8EDE57A7FF, + 0x9508B9FDD7DAF0D8126F9E2BC5A35E4C6D800B5B804D7796, + 0x36F2BF6B21B987C77B53BB801B3435A577E3D493744BFAB0, + False, + ), + ( + generator_192, + int( + "0x1843aba74b0789d4ac6b0b8923848023a644a7b70afa23b1191829bbe4397c" + "e15b629bf21a8838298653ed0c19222b95fa4f7390d1b4c844d96e645537e0aa" + "e98afb5c0ac3bd0e4c37f8daaff25556c64e98c319c52687c904c4de7240a1cc" + "55cd9756b7edaef184e6e23b385726e9ffcba8001b8f574987c1a3fedaaa83ca" + "6d", + 16, + ), + 0x10ECCA1AAD7220B56A62008B35170BFD5E35885C4014A19F, + 0x04EB61984C6C12ADE3BC47F3C629ECE7AA0A033B9948D686, + 0x82BFA4E82C0DFE9274169B86694E76CE993FD83B5C60F325, + 0xA97685676C59A65DBDE002FE9D613431FB183E8006D05633, + False, + ), + ( + generator_192, + int( + 
"0x5a478f4084ddd1a7fea038aa9732a822106385797d02311aeef4d0264f824f" + "698df7a48cfb6b578cf3da416bc0799425bb491be5b5ecc37995b85b03420a98" + "f2c4dc5c31a69a379e9e322fbe706bbcaf0f77175e05cbb4fa162e0da82010a2" + "78461e3e974d137bc746d1880d6eb02aa95216014b37480d84b87f717bb13f76" + "e1", + 16, + ), + 0x6636653CB5B894CA65C448277B29DA3AD101C4C2300F7C04, + 0xFDF1CBB3FC3FD6A4F890B59E554544175FA77DBDBEB656C1, + 0xEAC2DDECDDFB79931A9C3D49C08DE0645C783A24CB365E1C, + 0x3549FEE3CFA7E5F93BC47D92D8BA100E881A2A93C22F8D50, + False, + ), + ( + generator_192, + int( + "0xc598774259a058fa65212ac57eaa4f52240e629ef4c310722088292d1d4af6" + "c39b49ce06ba77e4247b20637174d0bd67c9723feb57b5ead232b47ea452d5d7" + "a089f17c00b8b6767e434a5e16c231ba0efa718a340bf41d67ea2d295812ff1b" + "9277daacb8bc27b50ea5e6443bcf95ef4e9f5468fe78485236313d53d1c68f6b" + "a2", + 16, + ), + 0xA82BD718D01D354001148CD5F69B9EBF38FF6F21898F8AAA, + 0xE67CEEDE07FC2EBFAFD62462A51E4B6C6B3D5B537B7CAF3E, + 0x4D292486C620C3DE20856E57D3BB72FCDE4A73AD26376955, + 0xA85289591A6081D5728825520E62FF1C64F94235C04C7F95, + False, + ), + ( + generator_192, + int( + "0xca98ed9db081a07b7557f24ced6c7b9891269a95d2026747add9e9eb80638a" + "961cf9c71a1b9f2c29744180bd4c3d3db60f2243c5c0b7cc8a8d40a3f9a7fc91" + "0250f2187136ee6413ffc67f1a25e1c4c204fa9635312252ac0e0481d89b6d53" + "808f0c496ba87631803f6c572c1f61fa049737fdacce4adff757afed4f05beb6" + "58", + 16, + ), + 0x7D3B016B57758B160C4FCA73D48DF07AE3B6B30225126C2F, + 0x4AF3790D9775742BDE46F8DA876711BE1B65244B2B39E7EC, + 0x95F778F5F656511A5AB49A5D69DDD0929563C29CBC3A9E62, + 0x75C87FC358C251B4C83D2DD979FAAD496B539F9F2EE7A289, + False, + ), + ( + generator_192, + int( + "0x31dd9a54c8338bea06b87eca813d555ad1850fac9742ef0bbe40dad400e102" + "88acc9c11ea7dac79eb16378ebea9490e09536099f1b993e2653cd50240014c9" + "0a9c987f64545abc6a536b9bd2435eb5e911fdfde2f13be96ea36ad38df4ae9e" + "a387b29cced599af777338af2794820c9cce43b51d2112380a35802ab7e396c9" + "7a", + 16, + ), + 0x9362F28C4EF96453D8A2F849F21E881CD7566887DA8BEB4A, + 0xE64D26D8D74C48A024AE85D982EE74CD16046F4EE5333905, + 0xF3923476A296C88287E8DE914B0B324AD5A963319A4FE73B, + 0xF0BAEED7624ED00D15244D8BA2AEDE085517DBDEC8AC65F5, + True, + ), + ( + generator_192, + int( + "0xb2b94e4432267c92f9fdb9dc6040c95ffa477652761290d3c7de312283f645" + "0d89cc4aabe748554dfb6056b2d8e99c7aeaad9cdddebdee9dbc099839562d90" + "64e68e7bb5f3a6bba0749ca9a538181fc785553a4000785d73cc207922f63e8c" + "e1112768cb1de7b673aed83a1e4a74592f1268d8e2a4e9e63d414b5d442bd045" + "6d", + 16, + ), + 0xCC6FC032A846AAAC25533EB033522824F94E670FA997ECEF, + 0xE25463EF77A029ECCDA8B294FD63DD694E38D223D30862F1, + 0x066B1D07F3A40E679B620EDA7F550842A35C18B80C5EBE06, + 0xA0B0FB201E8F2DF65E2C4508EF303BDC90D934016F16B2DC, + False, + ), + ( + generator_192, + int( + "0x4366fcadf10d30d086911de30143da6f579527036937007b337f7282460eae" + "5678b15cccda853193ea5fc4bc0a6b9d7a31128f27e1214988592827520b214e" + "ed5052f7775b750b0c6b15f145453ba3fee24a085d65287e10509eb5d5f602c4" + "40341376b95c24e5c4727d4b859bfe1483d20538acdd92c7997fa9c614f0f839" + "d7", + 16, + ), + 0x955C908FE900A996F7E2089BEE2F6376830F76A19135E753, + 0xBA0C42A91D3847DE4A592A46DC3FDAF45A7CC709B90DE520, + 0x1F58AD77FC04C782815A1405B0925E72095D906CBF52A668, + 0xF2E93758B3AF75EDF784F05A6761C9B9A6043C66B845B599, + False, + ), + ( + generator_192, + int( + "0x543f8af57d750e33aa8565e0cae92bfa7a1ff78833093421c2942cadf99866" + "70a5ff3244c02a8225e790fbf30ea84c74720abf99cfd10d02d34377c3d3b412" + "69bea763384f372bb786b5846f58932defa68023136cd571863b304886e95e52" + 
"e7877f445b9364b3f06f3c28da12707673fecb4b8071de06b6e0a3c87da160ce" + "f3", + 16, + ), + 0x31F7FA05576D78A949B24812D4383107A9A45BB5FCCDD835, + 0x8DC0EB65994A90F02B5E19BD18B32D61150746C09107E76B, + 0xBE26D59E4E883DDE7C286614A767B31E49AD88789D3A78FF, + 0x8762CA831C1CE42DF77893C9B03119428E7A9B819B619068, + False, + ), + ( + generator_192, + int( + "0xd2e8454143ce281e609a9d748014dcebb9d0bc53adb02443a6aac2ffe6cb009f" + "387c346ecb051791404f79e902ee333ad65e5c8cb38dc0d1d39a8dc90add502357" + "2720e5b94b190d43dd0d7873397504c0c7aef2727e628eb6a74411f2e400c65670" + "716cb4a815dc91cbbfeb7cfe8c929e93184c938af2c078584da045e8f8d1", + 16, + ), + 0x66AA8EDBBDB5CF8E28CEB51B5BDA891CAE2DF84819FE25C0, + 0x0C6BC2F69030A7CE58D4A00E3B3349844784A13B8936F8DA, + 0xA4661E69B1734F4A71B788410A464B71E7FFE42334484F23, + 0x738421CF5E049159D69C57A915143E226CAC8355E149AFE9, + False, + ), + ( + generator_192, + int( + "0x6660717144040f3e2f95a4e25b08a7079c702a8b29babad5a19a87654bc5c5af" + "a261512a11b998a4fb36b5d8fe8bd942792ff0324b108120de86d63f65855e5461" + "184fc96a0a8ffd2ce6d5dfb0230cbbdd98f8543e361b3205f5da3d500fdc8bac6d" + "b377d75ebef3cb8f4d1ff738071ad0938917889250b41dd1d98896ca06fb", + 16, + ), + 0xBCFACF45139B6F5F690A4C35A5FFFA498794136A2353FC77, + 0x6F4A6C906316A6AFC6D98FE1F0399D056F128FE0270B0F22, + 0x9DB679A3DAFE48F7CCAD122933ACFE9DA0970B71C94C21C1, + 0x984C2DB99827576C0A41A5DA41E07D8CC768BC82F18C9DA9, + False, + ), +] + + +@pytest.mark.parametrize("gen,msg,qx,qy,r,s,expected", CURVE_192_KATS) +def test_signature_validity(gen, msg, qx, qy, r, s, expected): + """ + `msg` = message, `qx` and `qy` represent the base point on + elliptic curve of `gen`, `r` and `s` are the signature, and + `expected` is True iff the signature is expected to be valid.""" + pubk = Public_key(gen, ellipticcurve.Point(gen.curve(), qx, qy)) + assert expected == pubk.verifies(digest_integer(msg), Signature(r, s)) + + +@pytest.mark.parametrize( + "gen,msg,qx,qy,r,s,expected", [x for x in CURVE_192_KATS if x[6]] +) +def test_pk_recovery(gen, msg, r, s, qx, qy, expected): + del expected + sign = Signature(r, s) + pks = sign.recover_public_keys(digest_integer(msg), gen) + + assert pks + + # Test if the signature is valid for all found public keys + for pk in pks: + q = pk.point + test_signature_validity(gen, msg, q.x(), q.y(), r, s, True) + + # Test if the original public key is in the set of found keys + original_q = ellipticcurve.Point(gen.curve(), qx, qy) + points = [pk.point for pk in pks] + assert original_q in points + + +@st.composite +def st_random_gen_key_msg_nonce(draw): + """Hypothesis strategy for test_sig_verify().""" + name_gen = { + "generator_192": generator_192, + "generator_224": generator_224, + "generator_256": generator_256, + "generator_secp256k1": generator_secp256k1, + "generator_384": generator_384, + "generator_521": generator_521, + } + name = draw(st.sampled_from(sorted(name_gen.keys()))) + note("Generator used: {0}".format(name)) + generator = name_gen[name] + order = int(generator.order()) + + key = draw(st.integers(min_value=1, max_value=order)) + msg = draw(st.integers(min_value=1, max_value=order)) + nonce = draw( + st.integers(min_value=1, max_value=order + 1) + | st.integers(min_value=order >> 1, max_value=order) + ) + return generator, key, msg, nonce + + +SIG_VER_SETTINGS = dict(HYP_SETTINGS) +SIG_VER_SETTINGS["max_examples"] = 10 + + +@settings(**SIG_VER_SETTINGS) +@example((generator_224, 4, 1, 1)) +@given(st_random_gen_key_msg_nonce()) +def test_sig_verify(args): + """ + Check if signing and verification 
works for arbitrary messages and + that signatures for other messages are rejected. + """ + generator, sec_mult, msg, nonce = args + + pubkey = Public_key(generator, generator * sec_mult) + privkey = Private_key(pubkey, sec_mult) + + signature = privkey.sign(msg, nonce) + + assert pubkey.verifies(msg, signature) + + assert not pubkey.verifies(msg - 1, signature) + + +def test_int_to_string_with_zero(): + assert int_to_string(0) == b"\x00" diff --git a/venv/lib/python3.10/site-packages/ecdsa/test_ellipticcurve.py b/venv/lib/python3.10/site-packages/ecdsa/test_ellipticcurve.py new file mode 100644 index 0000000..85faef4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/test_ellipticcurve.py @@ -0,0 +1,199 @@ +import pytest + +try: + import unittest2 as unittest +except ImportError: + import unittest +from hypothesis import given, settings +import hypothesis.strategies as st + +try: + from hypothesis import HealthCheck + + HC_PRESENT = True +except ImportError: # pragma: no cover + HC_PRESENT = False +from .numbertheory import inverse_mod +from .ellipticcurve import CurveFp, INFINITY, Point + + +HYP_SETTINGS = {} +if HC_PRESENT: # pragma: no branch + HYP_SETTINGS["suppress_health_check"] = [HealthCheck.too_slow] + HYP_SETTINGS["deadline"] = 5000 + + +# NIST Curve P-192: +p = 6277101735386680763835789423207666416083908700390324961279 +r = 6277101735386680763835789423176059013767194773182842284081 +# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5 +# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65 +b = 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1 +Gx = 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012 +Gy = 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811 + +c192 = CurveFp(p, -3, b) +p192 = Point(c192, Gx, Gy, r) + +c_23 = CurveFp(23, 1, 1) +g_23 = Point(c_23, 13, 7, 7) + + +HYP_SLOW_SETTINGS = dict(HYP_SETTINGS) +HYP_SLOW_SETTINGS["max_examples"] = 10 + + +@settings(**HYP_SLOW_SETTINGS) +@given(st.integers(min_value=1, max_value=r + 1)) +def test_p192_mult_tests(multiple): + inv_m = inverse_mod(multiple, r) + + p1 = p192 * multiple + assert p1 * inv_m == p192 + + +def add_n_times(point, n): + ret = INFINITY + i = 0 + while i <= n: + yield ret + ret = ret + point + i += 1 + + +# From X9.62 I.1 (p. 
96):
+@pytest.mark.parametrize(
+    "p, m, check",
+    [(g_23, n, exp) for n, exp in enumerate(add_n_times(g_23, 8))],
+    ids=["g_23 test with mult {0}".format(i) for i in range(9)],
+)
+def test_add_and_mult_equivalence(p, m, check):
+    assert p * m == check
+
+
+class TestCurve(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+        cls.c_23 = CurveFp(23, 1, 1)
+
+    def test_equality_curves(self):
+        self.assertEqual(self.c_23, CurveFp(23, 1, 1))
+
+    def test_inequality_curves(self):
+        c192 = CurveFp(p, -3, b)
+        self.assertNotEqual(self.c_23, c192)
+
+    def test_usability_in_a_hashed_collection_curves(self):
+        {self.c_23: None}
+
+    def test_hashability_curves(self):
+        hash(self.c_23)
+
+    def test_conflation_curves(self):
+        ne1, ne2, ne3 = CurveFp(24, 1, 1), CurveFp(23, 2, 1), CurveFp(23, 1, 2)
+        eq1, eq2, eq3 = CurveFp(23, 1, 1), CurveFp(23, 1, 1), self.c_23
+        self.assertEqual(len(set((c_23, eq1, eq2, eq3))), 1)
+        self.assertEqual(len(set((c_23, ne1, ne2, ne3))), 4)
+        self.assertDictEqual({c_23: None}, {eq1: None})
+        self.assertIn(eq2, {eq3: None})
+
+
+class TestPoint(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+        cls.c_23 = CurveFp(23, 1, 1)
+        cls.g_23 = Point(cls.c_23, 13, 7, 7)
+
+        p = 6277101735386680763835789423207666416083908700390324961279
+        r = 6277101735386680763835789423176059013767194773182842284081
+        # s = 0x3045ae6fc8422f64ed579528d38120eae12196d5
+        # c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65
+        b = 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1
+        Gx = 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012
+        Gy = 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811
+
+        cls.c192 = CurveFp(p, -3, b)
+        cls.p192 = Point(cls.c192, Gx, Gy, r)
+
+    def test_p192(self):
+        # Checking against some sample computations presented
+        # in X9.62:
+        d = 651056770906015076056810763456358567190100156695615665659
+        Q = d * self.p192
+        self.assertEqual(
+            Q.x(), 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5
+        )
+
+        k = 6140507067065001063065065565667405560006161556565665656654
+        R = k * self.p192
+        self.assertEqual(
+            R.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD
+        )
+        self.assertEqual(
+            R.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835
+        )
+
+        u1 = 2563697409189434185194736134579731015366492496392189760599
+        u2 = 6266643813348617967186477710235785849136406323338782220568
+        temp = u1 * self.p192 + u2 * Q
+        self.assertEqual(
+            temp.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD
+        )
+        self.assertEqual(
+            temp.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835
+        )
+
+    def test_double_infinity(self):
+        # doubling the point at infinity must stay at infinity
+        p1 = INFINITY
+        p3 = p1.double()
+        self.assertEqual(p1, p3)
+        self.assertEqual(p3.x(), p1.x())
+        self.assertEqual(p3.y(), p1.y())
+
+    def test_double(self):
+        x1, y1, x3, y3 = (3, 10, 7, 12)
+
+        p1 = Point(self.c_23, x1, y1)
+        p3 = p1.double()
+        self.assertEqual(p3.x(), x3)
+        self.assertEqual(p3.y(), y3)
+
+    def test_multiply(self):
+        x1, y1, m, x3, y3 = (3, 10, 2, 7, 12)
+        p1 = Point(self.c_23, x1, y1)
+        p3 = p1 * m
+        self.assertEqual(p3.x(), x3)
+        self.assertEqual(p3.y(), y3)
+
+    # Trivial tests from X9.62 B.3:
+    def test_add(self):
+        """We expect that on curve c, (x1,y1) + (x2,y2) = (x3,y3)."""
+
+        x1, y1, x2, y2, x3, y3 = (3, 10, 9, 7, 17, 20)
+        p1 = Point(self.c_23, x1, y1)
+        p2 = Point(self.c_23, x2, y2)
+        p3 = p1 + p2
+        self.assertEqual(p3.x(), x3)
+        self.assertEqual(p3.y(), y3)
+
+    def test_add_as_double(self):
+        """We expect that on curve c, (x1,y1) + (x2,y2) = (x3,y3)."""
+
+        x1, y1, x2, y2, x3, y3 = (3, 10, 3, 10, 7, 12)
+        p1 = 
Point(self.c_23, x1, y1) + p2 = Point(self.c_23, x2, y2) + p3 = p1 + p2 + self.assertEqual(p3.x(), x3) + self.assertEqual(p3.y(), y3) + + def test_equality_points(self): + self.assertEqual(self.g_23, Point(self.c_23, 13, 7, 7)) + + def test_inequality_points(self): + c = CurveFp(100, -3, 100) + p = Point(c, 100, 100, 100) + self.assertNotEqual(self.g_23, p) + + def test_inequality_points_diff_types(self): + c = CurveFp(100, -3, 100) + self.assertNotEqual(self.g_23, c) diff --git a/venv/lib/python3.10/site-packages/ecdsa/test_jacobi.py b/venv/lib/python3.10/site-packages/ecdsa/test_jacobi.py new file mode 100644 index 0000000..67f32fa --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/test_jacobi.py @@ -0,0 +1,657 @@ +import pickle + +try: + import unittest2 as unittest +except ImportError: + import unittest + +import os +import sys +import signal +import pytest +import threading +import platform +import hypothesis.strategies as st +from hypothesis import given, assume, settings, example + +from .ellipticcurve import CurveFp, PointJacobi, INFINITY +from .ecdsa import ( + generator_256, + curve_256, + generator_224, + generator_brainpoolp160r1, + curve_brainpoolp160r1, + generator_112r2, +) +from .numbertheory import inverse_mod +from .util import randrange + + +NO_OLD_SETTINGS = {} +if sys.version_info > (2, 7): # pragma: no branch + NO_OLD_SETTINGS["deadline"] = 5000 + + +class TestJacobi(unittest.TestCase): + def test___init__(self): + curve = object() + x = 2 + y = 3 + z = 1 + order = 4 + pj = PointJacobi(curve, x, y, z, order) + + self.assertEqual(pj.order(), order) + self.assertIs(pj.curve(), curve) + self.assertEqual(pj.x(), x) + self.assertEqual(pj.y(), y) + + def test_add_with_different_curves(self): + p_a = PointJacobi.from_affine(generator_256) + p_b = PointJacobi.from_affine(generator_224) + + with self.assertRaises(ValueError): + p_a + p_b + + def test_compare_different_curves(self): + self.assertNotEqual(generator_256, generator_224) + + def test_equality_with_non_point(self): + pj = PointJacobi.from_affine(generator_256) + + self.assertNotEqual(pj, "value") + + def test_conversion(self): + pj = PointJacobi.from_affine(generator_256) + pw = pj.to_affine() + + self.assertEqual(generator_256, pw) + + def test_single_double(self): + pj = PointJacobi.from_affine(generator_256) + pw = generator_256.double() + + pj = pj.double() + + self.assertEqual(pj.x(), pw.x()) + self.assertEqual(pj.y(), pw.y()) + + def test_double_with_zero_point(self): + pj = PointJacobi(curve_256, 0, 0, 1) + + pj = pj.double() + + self.assertIs(pj, INFINITY) + + def test_double_with_zero_equivalent_point(self): + pj = PointJacobi(curve_256, 0, curve_256.p(), 1) + + pj = pj.double() + + self.assertIs(pj, INFINITY) + + def test_double_with_zero_equivalent_point_non_1_z(self): + pj = PointJacobi(curve_256, 0, curve_256.p(), 2) + + pj = pj.double() + + self.assertIs(pj, INFINITY) + + def test_compare_with_affine_point(self): + pj = PointJacobi.from_affine(generator_256) + pa = pj.to_affine() + + self.assertEqual(pj, pa) + self.assertEqual(pa, pj) + + def test_to_affine_with_zero_point(self): + pj = PointJacobi(curve_256, 0, 0, 1) + + pa = pj.to_affine() + + self.assertIs(pa, INFINITY) + + def test_add_with_affine_point(self): + pj = PointJacobi.from_affine(generator_256) + pa = pj.to_affine() + + s = pj + pa + + self.assertEqual(s, pj.double()) + + def test_radd_with_affine_point(self): + pj = PointJacobi.from_affine(generator_256) + pa = pj.to_affine() + + s = pa + pj + + self.assertEqual(s, pj.double()) 
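+
+    # INFINITY is the neutral element of the group, so the next few cases
+    # check that adding it, or scaling a zero-equivalent point, either leaves
+    # the other operand unchanged or collapses back to INFINITY.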
+ + def test_add_with_infinity(self): + pj = PointJacobi.from_affine(generator_256) + + s = pj + INFINITY + + self.assertEqual(s, pj) + + def test_add_zero_point_to_affine(self): + pa = PointJacobi.from_affine(generator_256).to_affine() + pj = PointJacobi(curve_256, 0, 0, 1) + + s = pj + pa + + self.assertIs(s, pa) + + def test_multiply_by_zero(self): + pj = PointJacobi.from_affine(generator_256) + + pj = pj * 0 + + self.assertIs(pj, INFINITY) + + def test_zero_point_multiply_by_one(self): + pj = PointJacobi(curve_256, 0, 0, 1) + + pj = pj * 1 + + self.assertIs(pj, INFINITY) + + def test_multiply_by_one(self): + pj = PointJacobi.from_affine(generator_256) + pw = generator_256 * 1 + + pj = pj * 1 + + self.assertEqual(pj.x(), pw.x()) + self.assertEqual(pj.y(), pw.y()) + + def test_multiply_by_two(self): + pj = PointJacobi.from_affine(generator_256) + pw = generator_256 * 2 + + pj = pj * 2 + + self.assertEqual(pj.x(), pw.x()) + self.assertEqual(pj.y(), pw.y()) + + def test_rmul_by_two(self): + pj = PointJacobi.from_affine(generator_256) + pw = generator_256 * 2 + + pj = 2 * pj + + self.assertEqual(pj, pw) + + def test_compare_non_zero_with_infinity(self): + pj = PointJacobi.from_affine(generator_256) + + self.assertNotEqual(pj, INFINITY) + + def test_compare_zero_point_with_infinity(self): + pj = PointJacobi(curve_256, 0, 0, 1) + + self.assertEqual(pj, INFINITY) + + def test_compare_double_with_multiply(self): + pj = PointJacobi.from_affine(generator_256) + dbl = pj.double() + mlpl = pj * 2 + + self.assertEqual(dbl, mlpl) + + @settings(max_examples=10) + @given( + st.integers( + min_value=0, max_value=int(generator_brainpoolp160r1.order()) + ) + ) + def test_multiplications(self, mul): + pj = PointJacobi.from_affine(generator_brainpoolp160r1) + pw = pj.to_affine() * mul + + pj = pj * mul + + self.assertEqual((pj.x(), pj.y()), (pw.x(), pw.y())) + self.assertEqual(pj, pw) + + @settings(max_examples=10) + @given( + st.integers( + min_value=0, max_value=int(generator_brainpoolp160r1.order()) + ) + ) + @example(0) + @example(int(generator_brainpoolp160r1.order())) + def test_precompute(self, mul): + precomp = generator_brainpoolp160r1 + self.assertTrue(precomp._PointJacobi__precompute) + pj = PointJacobi.from_affine(generator_brainpoolp160r1) + + a = precomp * mul + b = pj * mul + + self.assertEqual(a, b) + + @settings(max_examples=10) + @given( + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order()) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order()) + ), + ) + @example(3, 3) + def test_add_scaled_points(self, a_mul, b_mul): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) + a = PointJacobi.from_affine(j_g * a_mul) + b = PointJacobi.from_affine(j_g * b_mul) + + c = a + b + + self.assertEqual(c, j_g * (a_mul + b_mul)) + + @settings(max_examples=10) + @given( + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order()) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order()) + ), + st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)), + ) + def test_add_one_scaled_point(self, a_mul, b_mul, new_z): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) + a = PointJacobi.from_affine(j_g * a_mul) + b = PointJacobi.from_affine(j_g * b_mul) + + p = curve_brainpoolp160r1.p() + + assume(inverse_mod(new_z, p)) + + new_zz = new_z * new_z % p + + b = PointJacobi( + curve_brainpoolp160r1, + b.x() * new_zz % p, + b.y() * new_zz * new_z % p, + new_z, + ) + + c = a + b + + 
self.assertEqual(c, j_g * (a_mul + b_mul)) + + @settings(max_examples=10) + @given( + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order()) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order()) + ), + st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)), + ) + @example(1, 1, 1) + @example(3, 3, 3) + @example(2, int(generator_brainpoolp160r1.order() - 2), 1) + @example(2, int(generator_brainpoolp160r1.order() - 2), 3) + def test_add_same_scale_points(self, a_mul, b_mul, new_z): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) + a = PointJacobi.from_affine(j_g * a_mul) + b = PointJacobi.from_affine(j_g * b_mul) + + p = curve_brainpoolp160r1.p() + + assume(inverse_mod(new_z, p)) + + new_zz = new_z * new_z % p + + a = PointJacobi( + curve_brainpoolp160r1, + a.x() * new_zz % p, + a.y() * new_zz * new_z % p, + new_z, + ) + b = PointJacobi( + curve_brainpoolp160r1, + b.x() * new_zz % p, + b.y() * new_zz * new_z % p, + new_z, + ) + + c = a + b + + self.assertEqual(c, j_g * (a_mul + b_mul)) + + def test_add_same_scale_points_static(self): + j_g = generator_brainpoolp160r1 + p = curve_brainpoolp160r1.p() + a = j_g * 11 + a.scale() + z1 = 13 + x = PointJacobi( + curve_brainpoolp160r1, + a.x() * z1 ** 2 % p, + a.y() * z1 ** 3 % p, + z1, + ) + y = PointJacobi( + curve_brainpoolp160r1, + a.x() * z1 ** 2 % p, + a.y() * z1 ** 3 % p, + z1, + ) + + c = a + a + + self.assertEqual(c, x + y) + + @settings(max_examples=14) + @given( + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order()) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order()) + ), + st.lists( + st.integers( + min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1) + ), + min_size=2, + max_size=2, + unique=True, + ), + ) + @example(2, 2, [2, 1]) + @example(2, 2, [2, 3]) + @example(2, int(generator_brainpoolp160r1.order() - 2), [2, 3]) + @example(2, int(generator_brainpoolp160r1.order() - 2), [2, 1]) + def test_add_different_scale_points(self, a_mul, b_mul, new_z): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) + a = PointJacobi.from_affine(j_g * a_mul) + b = PointJacobi.from_affine(j_g * b_mul) + + p = curve_brainpoolp160r1.p() + + assume(inverse_mod(new_z[0], p)) + assume(inverse_mod(new_z[1], p)) + + new_zz0 = new_z[0] * new_z[0] % p + new_zz1 = new_z[1] * new_z[1] % p + + a = PointJacobi( + curve_brainpoolp160r1, + a.x() * new_zz0 % p, + a.y() * new_zz0 * new_z[0] % p, + new_z[0], + ) + b = PointJacobi( + curve_brainpoolp160r1, + b.x() * new_zz1 % p, + b.y() * new_zz1 * new_z[1] % p, + new_z[1], + ) + + c = a + b + + self.assertEqual(c, j_g * (a_mul + b_mul)) + + def test_add_different_scale_points_static(self): + j_g = generator_brainpoolp160r1 + p = curve_brainpoolp160r1.p() + a = j_g * 11 + a.scale() + z1 = 13 + x = PointJacobi( + curve_brainpoolp160r1, + a.x() * z1 ** 2 % p, + a.y() * z1 ** 3 % p, + z1, + ) + z2 = 29 + y = PointJacobi( + curve_brainpoolp160r1, + a.x() * z2 ** 2 % p, + a.y() * z2 ** 3 % p, + z2, + ) + + c = a + a + + self.assertEqual(c, x + y) + + def test_add_point_3_times(self): + j_g = PointJacobi.from_affine(generator_256) + + self.assertEqual(j_g * 3, j_g + j_g + j_g) + + def test_mul_without_order(self): + j_g = PointJacobi(curve_256, generator_256.x(), generator_256.y(), 1) + + self.assertEqual(j_g * generator_256.order(), INFINITY) + + def test_mul_add_inf(self): + j_g = PointJacobi.from_affine(generator_256) + + self.assertEqual(j_g, j_g.mul_add(1, INFINITY, 1)) + + def 
test_mul_add_same(self): + j_g = PointJacobi.from_affine(generator_256) + + self.assertEqual(j_g * 2, j_g.mul_add(1, j_g, 1)) + + def test_mul_add_precompute(self): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1, True) + b = PointJacobi.from_affine(j_g * 255, True) + + self.assertEqual(j_g * 256, j_g + b) + self.assertEqual(j_g * (5 + 255 * 7), j_g * 5 + b * 7) + self.assertEqual(j_g * (5 + 255 * 7), j_g.mul_add(5, b, 7)) + + def test_mul_add_precompute_large(self): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1, True) + b = PointJacobi.from_affine(j_g * 255, True) + + self.assertEqual(j_g * 256, j_g + b) + self.assertEqual( + j_g * (0xFF00 + 255 * 0xF0F0), j_g * 0xFF00 + b * 0xF0F0 + ) + self.assertEqual( + j_g * (0xFF00 + 255 * 0xF0F0), j_g.mul_add(0xFF00, b, 0xF0F0) + ) + + def test_mul_add_to_mul(self): + j_g = PointJacobi.from_affine(generator_256) + + a = j_g * 3 + b = j_g.mul_add(2, j_g, 1) + + self.assertEqual(a, b) + + def test_mul_add_differnt(self): + j_g = PointJacobi.from_affine(generator_256) + + w_a = j_g * 2 + + self.assertEqual(j_g.mul_add(1, w_a, 1), j_g * 3) + + def test_mul_add_slightly_different(self): + j_g = PointJacobi.from_affine(generator_256) + + w_a = j_g * 2 + w_b = j_g * 3 + + self.assertEqual(w_a.mul_add(1, w_b, 3), w_a * 1 + w_b * 3) + + def test_mul_add(self): + j_g = PointJacobi.from_affine(generator_256) + + w_a = generator_256 * 255 + w_b = generator_256 * (0xA8 * 0xF0) + j_b = j_g * 0xA8 + + ret = j_g.mul_add(255, j_b, 0xF0) + + self.assertEqual(ret.to_affine(), w_a + w_b) + + def test_mul_add_large(self): + j_g = PointJacobi.from_affine(generator_256) + b = PointJacobi.from_affine(j_g * 255) + + self.assertEqual(j_g * 256, j_g + b) + self.assertEqual( + j_g * (0xFF00 + 255 * 0xF0F0), j_g * 0xFF00 + b * 0xF0F0 + ) + self.assertEqual( + j_g * (0xFF00 + 255 * 0xF0F0), j_g.mul_add(0xFF00, b, 0xF0F0) + ) + + def test_mul_add_with_infinity_as_result(self): + j_g = PointJacobi.from_affine(generator_256) + + order = generator_256.order() + + b = PointJacobi.from_affine(generator_256 * 256) + + self.assertEqual(j_g.mul_add(order % 256, b, order // 256), INFINITY) + + def test_mul_add_without_order(self): + j_g = PointJacobi(curve_256, generator_256.x(), generator_256.y(), 1) + + order = generator_256.order() + + w_b = generator_256 * 34 + w_b.scale() + + b = PointJacobi(curve_256, w_b.x(), w_b.y(), 1) + + self.assertEqual(j_g.mul_add(order % 34, b, order // 34), INFINITY) + + def test_mul_add_with_doubled_negation_of_itself(self): + j_g = PointJacobi.from_affine(generator_256 * 17) + + dbl_neg = 2 * (-j_g) + + self.assertEqual(j_g.mul_add(4, dbl_neg, 2), INFINITY) + + def test_equality(self): + pj1 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) + pj2 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) + self.assertEqual(pj1, pj2) + + def test_equality_with_invalid_object(self): + j_g = PointJacobi.from_affine(generator_256) + + self.assertNotEqual(j_g, 12) + + def test_equality_with_wrong_curves(self): + p_a = PointJacobi.from_affine(generator_256) + p_b = PointJacobi.from_affine(generator_224) + + self.assertNotEqual(p_a, p_b) + + def test_pickle(self): + pj = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) + self.assertEqual(pickle.loads(pickle.dumps(pj)), pj) + + @settings(**NO_OLD_SETTINGS) + @given(st.integers(min_value=1, max_value=10)) + def test_multithreading(self, thread_num): + # ensure that generator's precomputation table is filled + generator_112r2 * 2 + + # create a fresh 
point that doesn't have a filled precomputation table + gen = generator_112r2 + gen = PointJacobi(gen.curve(), gen.x(), gen.y(), 1, gen.order(), True) + + self.assertEqual(gen._PointJacobi__precompute, []) + + def runner(generator): + order = generator.order() + for _ in range(10): + generator * randrange(order) + + threads = [] + for _ in range(thread_num): + threads.append(threading.Thread(target=runner, args=(gen,))) + + for t in threads: + t.start() + + runner(gen) + + for t in threads: + t.join() + + self.assertEqual( + gen._PointJacobi__precompute, + generator_112r2._PointJacobi__precompute, + ) + + @pytest.mark.skipif( + platform.system() == "Windows", + reason="there are no signals on Windows", + ) + def test_multithreading_with_interrupts(self): + thread_num = 10 + # ensure that generator's precomputation table is filled + generator_112r2 * 2 + + # create a fresh point that doesn't have a filled precomputation table + gen = generator_112r2 + gen = PointJacobi(gen.curve(), gen.x(), gen.y(), 1, gen.order(), True) + + self.assertEqual(gen._PointJacobi__precompute, []) + + def runner(generator): + order = generator.order() + for _ in range(50): + generator * randrange(order) + + def interrupter(barrier_start, barrier_end, lock_exit): + # wait until MainThread can handle KeyboardInterrupt + barrier_start.release() + barrier_end.acquire() + os.kill(os.getpid(), signal.SIGINT) + lock_exit.release() + + threads = [] + for _ in range(thread_num): + threads.append(threading.Thread(target=runner, args=(gen,))) + + barrier_start = threading.Lock() + barrier_start.acquire() + barrier_end = threading.Lock() + barrier_end.acquire() + lock_exit = threading.Lock() + lock_exit.acquire() + + threads.append( + threading.Thread( + target=interrupter, + args=(barrier_start, barrier_end, lock_exit), + ) + ) + + for t in threads: + t.start() + + with self.assertRaises(KeyboardInterrupt): + # signal to interrupter that we can now handle the signal + barrier_start.acquire() + barrier_end.release() + runner(gen) + # use the lock to ensure we never go past the scope of + # assertRaises before the os.kill is called + lock_exit.acquire() + + for t in threads: + t.join() + + self.assertEqual( + gen._PointJacobi__precompute, + generator_112r2._PointJacobi__precompute, + ) diff --git a/venv/lib/python3.10/site-packages/ecdsa/test_keys.py b/venv/lib/python3.10/site-packages/ecdsa/test_keys.py new file mode 100644 index 0000000..0cf7cc9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/test_keys.py @@ -0,0 +1,652 @@ +try: + import unittest2 as unittest +except ImportError: + import unittest + +try: + buffer +except NameError: + buffer = memoryview + +import os +import array +import pytest +import hashlib + +from .keys import VerifyingKey, SigningKey, MalformedPointError +from .der import unpem, UnexpectedDER +from .util import ( + sigencode_string, + sigencode_der, + sigencode_strings, + sigdecode_string, + sigdecode_der, + sigdecode_strings, +) +from .curves import NIST256p, Curve, BRAINPOOLP160r1 +from .ellipticcurve import Point, PointJacobi, CurveFp, INFINITY +from .ecdsa import generator_brainpoolp160r1 + + +class TestVerifyingKeyFromString(unittest.TestCase): + """ + Verify that ecdsa.keys.VerifyingKey.from_string() can be used with + bytes-like objects + """ + + @classmethod + def setUpClass(cls): + cls.key_bytes = ( + b"\x04L\xa2\x95\xdb\xc7Z\xd7\x1f\x93\nz\xcf\x97\xcf" + b"\xd7\xc2\xd9o\xfe8}X!\xae\xd4\xfah\xfa^\rpI\xba\xd1" + b"Y\xfb\x92xa\xebo+\x9cG\xfav\xca" + ) + cls.vk = 
VerifyingKey.from_string(cls.key_bytes) + + def test_bytes(self): + self.assertIsNotNone(self.vk) + self.assertIsInstance(self.vk, VerifyingKey) + self.assertEqual( + self.vk.pubkey.point.x(), + 105419898848891948935835657980914000059957975659675736097, + ) + self.assertEqual( + self.vk.pubkey.point.y(), + 4286866841217412202667522375431381222214611213481632495306, + ) + + def test_bytes_memoryview(self): + vk = VerifyingKey.from_string(buffer(self.key_bytes)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytearray(self): + vk = VerifyingKey.from_string(bytearray(self.key_bytes)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytesarray_memoryview(self): + vk = VerifyingKey.from_string(buffer(bytearray(self.key_bytes))) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_bytes(self): + arr = array.array("B", self.key_bytes) + vk = VerifyingKey.from_string(arr) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_bytes_memoryview(self): + arr = array.array("B", self.key_bytes) + vk = VerifyingKey.from_string(buffer(arr)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_ints(self): + arr = array.array("I", self.key_bytes) + vk = VerifyingKey.from_string(arr) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_ints_memoryview(self): + arr = array.array("I", self.key_bytes) + vk = VerifyingKey.from_string(buffer(arr)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytes_uncompressed(self): + vk = VerifyingKey.from_string(b"\x04" + self.key_bytes) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytearray_uncompressed(self): + vk = VerifyingKey.from_string(bytearray(b"\x04" + self.key_bytes)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytes_compressed(self): + vk = VerifyingKey.from_string(b"\x02" + self.key_bytes[:24]) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytearray_compressed(self): + vk = VerifyingKey.from_string(bytearray(b"\x02" + self.key_bytes[:24])) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + +class TestVerifyingKeyFromDer(unittest.TestCase): + """ + Verify that ecdsa.keys.VerifyingKey.from_der() can be used with + bytes-like objects. 
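+
+    A hedged sketch of the property these cases exercise (key_bytes is
+    the DER blob setUpClass derives via unpem(); buffer is the
+    memoryview shim defined at the top of this module):
+
+        vk_a = VerifyingKey.from_der(key_bytes)
+        vk_b = VerifyingKey.from_der(buffer(bytearray(key_bytes)))
+        assert vk_a.to_string() == vk_b.to_string()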
+ """ + + @classmethod + def setUpClass(cls): + prv_key_str = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" + "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" + "bA==\n" + "-----END EC PRIVATE KEY-----\n" + ) + key_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n" + "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n" + "-----END PUBLIC KEY-----\n" + ) + cls.key_pem = key_str + + cls.key_bytes = unpem(key_str) + assert isinstance(cls.key_bytes, bytes) + cls.vk = VerifyingKey.from_pem(key_str) + cls.sk = SigningKey.from_pem(prv_key_str) + + key_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8\n" + "Poqzgjau4kfxBPyZimeRfuY/9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n" + "-----END PUBLIC KEY-----\n" + ) + cls.vk2 = VerifyingKey.from_pem(key_str) + + cls.sk2 = SigningKey.generate(vk.curve) + + def test_load_key_with_explicit_parameters(self): + pub_key_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA\n" + "AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA////\n" + "///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd\n" + "NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5\n" + "RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA\n" + "//////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABIr1UkgYs5jmbFc7it1/YI2X\n" + "T//IlaEjMNZft1owjqpBYH2ErJHk4U5Pp4WvWq1xmHwIZlsH7Ig4KmefCfR6SmU=\n" + "-----END PUBLIC KEY-----" + ) + pk = VerifyingKey.from_pem(pub_key_str) + + pk_exp = VerifyingKey.from_string( + b"\x04\x8a\xf5\x52\x48\x18\xb3\x98\xe6\x6c\x57\x3b\x8a\xdd\x7f" + b"\x60\x8d\x97\x4f\xff\xc8\x95\xa1\x23\x30\xd6\x5f\xb7\x5a\x30" + b"\x8e\xaa\x41\x60\x7d\x84\xac\x91\xe4\xe1\x4e\x4f\xa7\x85\xaf" + b"\x5a\xad\x71\x98\x7c\x08\x66\x5b\x07\xec\x88\x38\x2a\x67\x9f" + b"\x09\xf4\x7a\x4a\x65", + curve=NIST256p, + ) + self.assertEqual(pk, pk_exp) + + def test_load_key_with_explicit_with_explicit_disabled(self): + pub_key_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA\n" + "AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA////\n" + "///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd\n" + "NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5\n" + "RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA\n" + "//////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABIr1UkgYs5jmbFc7it1/YI2X\n" + "T//IlaEjMNZft1owjqpBYH2ErJHk4U5Pp4WvWq1xmHwIZlsH7Ig4KmefCfR6SmU=\n" + "-----END PUBLIC KEY-----" + ) + with self.assertRaises(UnexpectedDER): + VerifyingKey.from_pem( + pub_key_str, valid_curve_encodings=["named_curve"] + ) + + def test_load_key_with_disabled_format(self): + with self.assertRaises(MalformedPointError) as e: + VerifyingKey.from_der(self.key_bytes, valid_encodings=["raw"]) + + self.assertIn("enabled (raw) encodings", str(e.exception)) + + def test_custom_hashfunc(self): + vk = VerifyingKey.from_der(self.key_bytes, hashlib.sha256) + + self.assertIs(vk.default_hashfunc, hashlib.sha256) + + def test_from_pem_with_custom_hashfunc(self): + vk = VerifyingKey.from_pem(self.key_pem, hashlib.sha256) + + self.assertIs(vk.default_hashfunc, hashlib.sha256) + + def test_bytes(self): + vk = VerifyingKey.from_der(self.key_bytes) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytes_memoryview(self): + vk = 
VerifyingKey.from_der(buffer(self.key_bytes)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytearray(self): + vk = VerifyingKey.from_der(bytearray(self.key_bytes)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytesarray_memoryview(self): + vk = VerifyingKey.from_der(buffer(bytearray(self.key_bytes))) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_bytes(self): + arr = array.array("B", self.key_bytes) + vk = VerifyingKey.from_der(arr) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_bytes_memoryview(self): + arr = array.array("B", self.key_bytes) + vk = VerifyingKey.from_der(buffer(arr)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_equality_on_verifying_keys(self): + self.assertEqual(self.vk, self.sk.get_verifying_key()) + + def test_inequality_on_verifying_keys(self): + self.assertNotEqual(self.vk, self.vk2) + + def test_inequality_on_verifying_keys_not_implemented(self): + self.assertNotEqual(self.vk, None) + + def test_VerifyingKey_inequality_on_same_curve(self): + self.assertNotEqual(self.vk, self.sk2.verifying_key) + + def test_SigningKey_inequality_on_same_curve(self): + self.assertNotEqual(self.sk, self.sk2) + + def test_inequality_on_wrong_types(self): + self.assertNotEqual(self.vk, self.sk) + + def test_from_public_point_old(self): + pj = self.vk.pubkey.point + point = Point(pj.curve(), pj.x(), pj.y()) + + vk = VerifyingKey.from_public_point(point, self.vk.curve) + + self.assertEqual(vk, self.vk) + + +class TestSigningKey(unittest.TestCase): + """ + Verify that ecdsa.keys.SigningKey.from_der() can be used with + bytes-like objects. + """ + + @classmethod + def setUpClass(cls): + prv_key_str = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" + "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" + "bA==\n" + "-----END EC PRIVATE KEY-----\n" + ) + cls.sk1 = SigningKey.from_pem(prv_key_str) + + prv_key_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MG8CAQAwEwYHKoZIzj0CAQYIKoZIzj0DAQEEVTBTAgEBBBheyEIL1u+SUqlC6YkE\n" + "PKKfVh+lJXcOscWhNAMyAAS4gXfQhO8X9eRWOUCAKDYPn1m0pNcmTmLaBlHc5Ho1\n" + "pMW0XPUVk0I6i1V7nCCZ82w=\n" + "-----END PRIVATE KEY-----\n" + ) + cls.sk1_pkcs8 = SigningKey.from_pem(prv_key_str) + + prv_key_str = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MHcCAQEEIKlL2EAm5NPPZuXwxRf4nXMk0A80y6UUbiQ17be/qFhRoAoGCCqGSM49\n" + "AwEHoUQDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8Poqzgjau4kfxBPyZimeRfuY/\n" + "9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n" + "-----END EC PRIVATE KEY-----\n" + ) + cls.sk2 = SigningKey.from_pem(prv_key_str) + + def test_decoding_explicit_curve_parameters(self): + prv_key_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIBeQIBADCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAAB\n" + "AAAAAAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA\n" + "///////////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMV\n" + "AMSdNgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg\n" + "9KE5RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8A\n" + "AAAA//////////+85vqtpxeehPO5ysL8YyVRAgEBBG0wawIBAQQgIXtREfUmR16r\n" + "ZbmvDGD2lAEFPZa2DLPyz0czSja58yChRANCAASK9VJIGLOY5mxXO4rdf2CNl0//\n" + "yJWhIzDWX7daMI6qQWB9hKyR5OFOT6eFr1qtcZh8CGZbB+yIOCpnnwn0ekpl\n" + "-----END PRIVATE KEY-----\n" + ) + + sk = SigningKey.from_pem(prv_key_str) + + sk2 = SigningKey.from_string( + b"\x21\x7b\x51\x11\xf5\x26\x47\x5e\xab\x65\xb9\xaf\x0c\x60\xf6" + 
b"\x94\x01\x05\x3d\x96\xb6\x0c\xb3\xf2\xcf\x47\x33\x4a\x36\xb9" + b"\xf3\x20", + curve=NIST256p, + ) + + self.assertEqual(sk, sk2) + + def test_decoding_explicit_curve_parameters_with_explicit_disabled(self): + prv_key_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIBeQIBADCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAAB\n" + "AAAAAAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA\n" + "///////////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMV\n" + "AMSdNgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg\n" + "9KE5RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8A\n" + "AAAA//////////+85vqtpxeehPO5ysL8YyVRAgEBBG0wawIBAQQgIXtREfUmR16r\n" + "ZbmvDGD2lAEFPZa2DLPyz0czSja58yChRANCAASK9VJIGLOY5mxXO4rdf2CNl0//\n" + "yJWhIzDWX7daMI6qQWB9hKyR5OFOT6eFr1qtcZh8CGZbB+yIOCpnnwn0ekpl\n" + "-----END PRIVATE KEY-----\n" + ) + + with self.assertRaises(UnexpectedDER): + SigningKey.from_pem( + prv_key_str, valid_curve_encodings=["named_curve"] + ) + + def test_equality_on_signing_keys(self): + sk = SigningKey.from_secret_exponent( + self.sk1.privkey.secret_multiplier, self.sk1.curve + ) + self.assertEqual(self.sk1, sk) + self.assertEqual(self.sk1_pkcs8, sk) + + def test_verify_with_precompute(self): + sig = self.sk1.sign(b"message") + + vk = self.sk1.verifying_key + + vk.precompute() + + self.assertTrue(vk.verify(sig, b"message")) + + def test_compare_verifying_key_with_precompute(self): + vk1 = self.sk1.verifying_key + vk1.precompute() + + vk2 = self.sk1_pkcs8.verifying_key + + self.assertEqual(vk1, vk2) + + def test_verify_with_lazy_precompute(self): + sig = self.sk2.sign(b"other message") + + vk = self.sk2.verifying_key + + vk.precompute(lazy=True) + + self.assertTrue(vk.verify(sig, b"other message")) + + def test_inequality_on_signing_keys(self): + self.assertNotEqual(self.sk1, self.sk2) + + def test_inequality_on_signing_keys_not_implemented(self): + self.assertNotEqual(self.sk1, None) + + +class TestTrivialCurve(unittest.TestCase): + @classmethod + def setUpClass(cls): + # To test what happens with r or s in signing happens to be zero we + # need to find a scalar that creates one of the points on a curve that + # has x coordinate equal to zero. 
+ # Even for secp112r2 curve that's non trivial so use this toy + # curve, for which we can iterate over all points quickly + curve = CurveFp(163, 84, 58) + gen = PointJacobi(curve, 2, 87, 1, 167, generator=True) + + cls.toy_curve = Curve("toy_p8", curve, gen, (1, 2, 0)) + + cls.sk = SigningKey.from_secret_exponent( + 140, cls.toy_curve, hashfunc=hashlib.sha1, + ) + + def test_generator_sanity(self): + gen = self.toy_curve.generator + + self.assertEqual(gen * gen.order(), INFINITY) + + def test_public_key_sanity(self): + self.assertEqual(self.sk.verifying_key.to_string(), b"\x98\x1e") + + def test_deterministic_sign(self): + sig = self.sk.sign_deterministic(b"message") + + self.assertEqual(sig, b"-.") + + self.assertTrue(self.sk.verifying_key.verify(sig, b"message")) + + def test_deterministic_sign_random_message(self): + msg = os.urandom(32) + sig = self.sk.sign_deterministic(msg) + self.assertEqual(len(sig), 2) + self.assertTrue(self.sk.verifying_key.verify(sig, msg)) + + def test_deterministic_sign_that_rises_R_zero_error(self): + # the raised RSZeroError is caught and handled internally by + # sign_deterministic methods + msg = b"\x00\x4f" + sig = self.sk.sign_deterministic(msg) + self.assertEqual(sig, b"\x36\x9e") + self.assertTrue(self.sk.verifying_key.verify(sig, msg)) + + def test_deterministic_sign_that_rises_S_zero_error(self): + msg = b"\x01\x6d" + sig = self.sk.sign_deterministic(msg) + self.assertEqual(sig, b"\x49\x6c") + self.assertTrue(self.sk.verifying_key.verify(sig, msg)) + + +# test VerifyingKey.verify() +prv_key_str = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" + "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" + "bA==\n" + "-----END EC PRIVATE KEY-----\n" +) +key_bytes = unpem(prv_key_str) +assert isinstance(key_bytes, bytes) +sk = SigningKey.from_der(key_bytes) +vk = sk.verifying_key + +data = ( + b"some string for signing" + b"contents don't really matter" + b"but do include also some crazy values: " + b"\x00\x01\t\r\n\x00\x00\x00\xff\xf0" +) +assert len(data) % 4 == 0 +sha1 = hashlib.sha1() +sha1.update(data) +data_hash = sha1.digest() +assert isinstance(data_hash, bytes) +sig_raw = sk.sign(data, sigencode=sigencode_string) +assert isinstance(sig_raw, bytes) +sig_der = sk.sign(data, sigencode=sigencode_der) +assert isinstance(sig_der, bytes) +sig_strings = sk.sign(data, sigencode=sigencode_strings) +assert isinstance(sig_strings[0], bytes) + +verifiers = [] +for modifier, fun in [ + ("bytes", lambda x: x), + ("bytes memoryview", lambda x: buffer(x)), + ("bytearray", lambda x: bytearray(x)), + ("bytearray memoryview", lambda x: buffer(bytearray(x))), + ("array.array of bytes", lambda x: array.array("B", x)), + ("array.array of bytes memoryview", lambda x: buffer(array.array("B", x))), + ("array.array of ints", lambda x: array.array("I", x)), + ("array.array of ints memoryview", lambda x: buffer(array.array("I", x))), +]: + if "ints" in modifier: + conv = lambda x: x + else: + conv = fun + for sig_format, signature, decoder, mod_apply in [ + ("raw", sig_raw, sigdecode_string, lambda x: conv(x)), + ("der", sig_der, sigdecode_der, lambda x: conv(x)), + ( + "strings", + sig_strings, + sigdecode_strings, + lambda x: tuple(conv(i) for i in x), + ), + ]: + for method_name, vrf_mthd, vrf_data in [ + ("verify", vk.verify, data), + ("verify_digest", vk.verify_digest, data_hash), + ]: + verifiers.append( + pytest.param( + signature, + decoder, + mod_apply, + fun, + vrf_mthd, + vrf_data, + 
id="{2}-{0}-{1}".format(modifier, sig_format, method_name), + ) + ) + + +@pytest.mark.parametrize( + "signature,decoder,mod_apply,fun,vrf_mthd,vrf_data", verifiers +) +def test_VerifyingKey_verify( + signature, decoder, mod_apply, fun, vrf_mthd, vrf_data +): + sig = mod_apply(signature) + + assert vrf_mthd(sig, fun(vrf_data), sigdecode=decoder) + + +# test SigningKey.from_string() +prv_key_bytes = ( + b"^\xc8B\x0b\xd6\xef\x92R\xa9B\xe9\x89\x04<\xa2" + b"\x9fV\x1f\xa5%w\x0e\xb1\xc5" +) +assert len(prv_key_bytes) == 24 +converters = [] +for modifier, convert in [ + ("bytes", lambda x: x), + ("bytes memoryview", buffer), + ("bytearray", bytearray), + ("bytearray memoryview", lambda x: buffer(bytearray(x))), + ("array.array of bytes", lambda x: array.array("B", x)), + ("array.array of bytes memoryview", lambda x: buffer(array.array("B", x))), + ("array.array of ints", lambda x: array.array("I", x)), + ("array.array of ints memoryview", lambda x: buffer(array.array("I", x))), +]: + converters.append(pytest.param(convert, id=modifier)) + + +@pytest.mark.parametrize("convert", converters) +def test_SigningKey_from_string(convert): + key = convert(prv_key_bytes) + sk = SigningKey.from_string(key) + + assert sk.to_string() == prv_key_bytes + + +# test SigningKey.from_der() +prv_key_str = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" + "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" + "bA==\n" + "-----END EC PRIVATE KEY-----\n" +) +key_bytes = unpem(prv_key_str) +assert isinstance(key_bytes, bytes) + +# last two converters are for array.array of ints, those require input +# that's multiple of 4, which no curve we support produces +@pytest.mark.parametrize("convert", converters[:-2]) +def test_SigningKey_from_der(convert): + key = convert(key_bytes) + sk = SigningKey.from_der(key) + + assert sk.to_string() == prv_key_bytes + + +# test SigningKey.sign_deterministic() +extra_entropy = b"\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11" + + +@pytest.mark.parametrize("convert", converters) +def test_SigningKey_sign_deterministic(convert): + sig = sk.sign_deterministic( + convert(data), extra_entropy=convert(extra_entropy) + ) + + vk.verify(sig, data) + + +# test SigningKey.sign_digest_deterministic() +@pytest.mark.parametrize("convert", converters) +def test_SigningKey_sign_digest_deterministic(convert): + sig = sk.sign_digest_deterministic( + convert(data_hash), extra_entropy=convert(extra_entropy) + ) + + vk.verify(sig, data) + + +@pytest.mark.parametrize("convert", converters) +def test_SigningKey_sign(convert): + sig = sk.sign(convert(data)) + + vk.verify(sig, data) + + +@pytest.mark.parametrize("convert", converters) +def test_SigningKey_sign_digest(convert): + sig = sk.sign_digest(convert(data_hash)) + + vk.verify(sig, data) + + +def test_SigningKey_with_unlikely_value(): + sk = SigningKey.from_secret_exponent(NIST256p.order - 1, curve=NIST256p) + vk = sk.verifying_key + sig = sk.sign(b"hello") + assert vk.verify(sig, b"hello") + + +def test_SigningKey_with_custom_curve_old_point(): + generator = generator_brainpoolp160r1 + generator = Point( + generator.curve(), generator.x(), generator.y(), generator.order(), + ) + + curve = Curve( + "BRAINPOOLP160r1", + generator.curve(), + generator, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 1), + ) + + sk = SigningKey.from_secret_exponent(12, curve) + + sk2 = SigningKey.from_secret_exponent(12, BRAINPOOLP160r1) + + assert sk.privkey == sk2.privkey + + +def 
test_VerifyingKey_inequality_with_different_curves(): + sk1 = SigningKey.from_secret_exponent(2, BRAINPOOLP160r1) + sk2 = SigningKey.from_secret_exponent(2, NIST256p) + + assert sk1.verifying_key != sk2.verifying_key + + +def test_VerifyingKey_inequality_with_different_secret_points(): + sk1 = SigningKey.from_secret_exponent(2, BRAINPOOLP160r1) + sk2 = SigningKey.from_secret_exponent(3, BRAINPOOLP160r1) + + assert sk1.verifying_key != sk2.verifying_key diff --git a/venv/lib/python3.10/site-packages/ecdsa/test_malformed_sigs.py b/venv/lib/python3.10/site-packages/ecdsa/test_malformed_sigs.py new file mode 100644 index 0000000..3cc1b2b --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/test_malformed_sigs.py @@ -0,0 +1,349 @@ +from __future__ import with_statement, division + +import hashlib + +try: + from hashlib import algorithms_available +except ImportError: # pragma: no cover + algorithms_available = [ + "md5", + "sha1", + "sha224", + "sha256", + "sha384", + "sha512", + ] +from functools import partial +import pytest +import sys +import hypothesis.strategies as st +from hypothesis import note, assume, given, settings, example + +from .keys import SigningKey +from .keys import BadSignatureError +from .util import sigencode_der, sigencode_string +from .util import sigdecode_der, sigdecode_string +from .curves import curves +from .der import ( + encode_integer, + encode_bitstring, + encode_octet_string, + encode_oid, + encode_sequence, + encode_constructed, +) + + +example_data = b"some data to sign" +"""Since the data is hashed for processing, really any string will do.""" + + +hash_and_size = [ + (name, hashlib.new(name).digest_size) for name in algorithms_available +] +"""Pairs of hash names and their output sizes. +Needed for pairing with curves as we don't support hashes +bigger than order sizes of curves.""" + + +keys_and_sigs = [] +"""Name of the curve+hash combination, VerifyingKey and DER signature.""" + + +# for hypothesis strategy shrinking we want smallest curves and hashes first +for curve in sorted(curves, key=lambda x: x.baselen): + for hash_alg in [ + name + for name, size in sorted(hash_and_size, key=lambda x: x[1]) + if 0 < size <= curve.baselen + ]: + sk = SigningKey.generate( + curve, hashfunc=partial(hashlib.new, hash_alg) + ) + + keys_and_sigs.append( + ( + "{0} {1}".format(curve, hash_alg), + sk.verifying_key, + sk.sign(example_data, sigencode=sigencode_der), + ) + ) + + +# first make sure that the signatures can be verified +@pytest.mark.parametrize( + "verifying_key,signature", + [pytest.param(vk, sig, id=name) for name, vk, sig in keys_and_sigs], +) +def test_signatures(verifying_key, signature): + assert verifying_key.verify( + signature, example_data, sigdecode=sigdecode_der + ) + + +@st.composite +def st_fuzzed_sig(draw, keys_and_sigs): + """ + Hypothesis strategy that generates pairs of VerifyingKey and malformed + signatures created by fuzzing of a valid signature. 
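+
+    Three mutations are drawn below: random byte positions are deleted,
+    surviving bytes are XOR-ed with random masks, and a random blob is
+    spliced in at a random offset; assume() then rejects draws that
+    leave the signature unchanged.  A hedged interactive sketch
+    (Hypothesis's .example() is for exploration only, never in tests):
+
+        vk, mangled_sig = st_fuzzed_sig(keys_and_sigs).example()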
+ """ + name, verifying_key, old_sig = draw(st.sampled_from(keys_and_sigs)) + note("Configuration: {0}".format(name)) + + sig = bytearray(old_sig) + + # decide which bytes should be removed + to_remove = draw( + st.lists(st.integers(min_value=0, max_value=len(sig) - 1), unique=True) + ) + to_remove.sort() + for i in reversed(to_remove): + del sig[i] + note("Remove bytes: {0}".format(to_remove)) + + # decide which bytes of the original signature should be changed + if sig: # pragma: no branch + xors = draw( + st.dictionaries( + st.integers(min_value=0, max_value=len(sig) - 1), + st.integers(min_value=1, max_value=255), + ) + ) + for i, val in xors.items(): + sig[i] ^= val + note("xors: {0}".format(xors)) + + # decide where new data should be inserted + insert_pos = draw(st.integers(min_value=0, max_value=len(sig))) + # NIST521p signature is about 140 bytes long, test slightly longer + insert_data = draw(st.binary(max_size=256)) + + sig = sig[:insert_pos] + insert_data + sig[insert_pos:] + note( + "Inserted at position {0} bytes: {1!r}".format(insert_pos, insert_data) + ) + + sig = bytes(sig) + # make sure that there was performed at least one mutation on the data + assume(to_remove or xors or insert_data) + # and that the mutations didn't cancel each-other out + assume(sig != old_sig) + + return verifying_key, sig + + +params = {} +# not supported in hypothesis 2.0.0 +if sys.version_info >= (2, 7): # pragma: no branch + from hypothesis import HealthCheck + + # deadline=5s because NIST521p are slow to verify + params["deadline"] = 5000 + params["suppress_health_check"] = [ + HealthCheck.data_too_large, + HealthCheck.filter_too_much, + HealthCheck.too_slow, + ] + +slow_params = dict(params) +slow_params["max_examples"] = 10 + + +@settings(**params) +@given(st_fuzzed_sig(keys_and_sigs)) +def test_fuzzed_der_signatures(args): + verifying_key, sig = args + + with pytest.raises(BadSignatureError): + verifying_key.verify(sig, example_data, sigdecode=sigdecode_der) + + +@st.composite +def st_random_der_ecdsa_sig_value(draw): + """ + Hypothesis strategy for selecting random values and encoding them + to ECDSA-Sig-Value object:: + + ECDSA-Sig-Value ::= SEQUENCE { + r INTEGER, + s INTEGER + } + """ + name, verifying_key, _ = draw(st.sampled_from(keys_and_sigs)) + note("Configuration: {0}".format(name)) + order = int(verifying_key.curve.order) + + # the encode_integer doesn't suport negative numbers, would be nice + # to generate them too, but we have coverage for remove_integer() + # verifying that it doesn't accept them, so meh. + # Test all numbers around the ones that can show up (around order) + # way smaller and slightly bigger + r = draw( + st.integers(min_value=0, max_value=order << 4) + | st.integers(min_value=order >> 2, max_value=order + 1) + ) + s = draw( + st.integers(min_value=0, max_value=order << 4) + | st.integers(min_value=order >> 2, max_value=order + 1) + ) + + sig = encode_sequence(encode_integer(r), encode_integer(s)) + + return verifying_key, sig + + +@settings(**slow_params) +@given(st_random_der_ecdsa_sig_value()) +def test_random_der_ecdsa_sig_value(params): + """ + Check if random values encoded in ECDSA-Sig-Value structure are rejected + as signature. + """ + verifying_key, sig = params + + with pytest.raises(BadSignatureError): + verifying_key.verify(sig, example_data, sigdecode=sigdecode_der) + + +def st_der_integer(*args, **kwargs): + """ + Hypothesis strategy that returns a random positive integer as DER + INTEGER. + Parameters are passed to hypothesis.strategy.integer. 
+ """ + if "min_value" not in kwargs: # pragma: no branch + kwargs["min_value"] = 0 + return st.builds(encode_integer, st.integers(*args, **kwargs)) + + +@st.composite +def st_der_bit_string(draw, *args, **kwargs): + """ + Hypothesis strategy that returns a random DER BIT STRING. + Parameters are passed to hypothesis.strategy.binary. + """ + data = draw(st.binary(*args, **kwargs)) + if data: + unused = draw(st.integers(min_value=0, max_value=7)) + data = bytearray(data) + data[-1] &= -(2 ** unused) + data = bytes(data) + else: + unused = 0 + return encode_bitstring(data, unused) + + +def st_der_octet_string(*args, **kwargs): + """ + Hypothesis strategy that returns a random DER OCTET STRING object. + Parameters are passed to hypothesis.strategy.binary + """ + return st.builds(encode_octet_string, st.binary(*args, **kwargs)) + + +def st_der_null(): + """ + Hypothesis strategy that returns DER NULL object. + """ + return st.just(b"\x05\x00") + + +@st.composite +def st_der_oid(draw): + """ + Hypothesis strategy that returns DER OBJECT IDENTIFIER objects. + """ + first = draw(st.integers(min_value=0, max_value=2)) + if first < 2: + second = draw(st.integers(min_value=0, max_value=39)) + else: + second = draw(st.integers(min_value=0, max_value=2 ** 512)) + rest = draw( + st.lists(st.integers(min_value=0, max_value=2 ** 512), max_size=50) + ) + return encode_oid(first, second, *rest) + + +def st_der(): + """ + Hypothesis strategy that returns random DER structures. + + A valid DER structure is any primitive object, an octet encoding + of a valid DER structure, sequence of valid DER objects or a constructed + encoding of any of the above. + """ + return st.recursive( + st.just(b"") + | st_der_integer(max_value=2 ** 4096) + | st_der_bit_string(max_size=1024 ** 2) + | st_der_octet_string(max_size=1024 ** 2) + | st_der_null() + | st_der_oid(), + lambda children: st.builds( + lambda x: encode_octet_string(x), st.one_of(children) + ) + | st.builds(lambda x: encode_bitstring(x, 0), st.one_of(children)) + | st.builds( + lambda x: encode_sequence(*x), st.lists(children, max_size=200) + ) + | st.builds( + lambda tag, x: encode_constructed(tag, x), + st.integers(min_value=0, max_value=0x3F), + st.one_of(children), + ), + max_leaves=40, + ) + + +@settings(**params) +@given(st.sampled_from(keys_and_sigs), st_der()) +def test_random_der_as_signature(params, der): + """Check if random DER structures are rejected as signature""" + name, verifying_key, _ = params + + with pytest.raises(BadSignatureError): + verifying_key.verify(der, example_data, sigdecode=sigdecode_der) + + +@settings(**params) +@given(st.sampled_from(keys_and_sigs), st.binary(max_size=1024 ** 2)) +@example( + keys_and_sigs[0], encode_sequence(encode_integer(0), encode_integer(0)) +) +@example( + keys_and_sigs[0], + encode_sequence(encode_integer(1), encode_integer(1)) + b"\x00", +) +@example(keys_and_sigs[0], encode_sequence(*[encode_integer(1)] * 3)) +def test_random_bytes_as_signature(params, der): + """Check if random bytes are rejected as signature""" + name, verifying_key, _ = params + + with pytest.raises(BadSignatureError): + verifying_key.verify(der, example_data, sigdecode=sigdecode_der) + + +keys_and_string_sigs = [ + ( + name, + verifying_key, + sigencode_string( + *sigdecode_der(sig, verifying_key.curve.order), + order=verifying_key.curve.order + ), + ) + for name, verifying_key, sig in keys_and_sigs +] +""" +Name of the curve+hash combination, VerifyingKey and signature as a +byte string. 
+""" + + +@settings(**params) +@given(st_fuzzed_sig(keys_and_string_sigs)) +def test_fuzzed_string_signatures(params): + verifying_key, sig = params + + with pytest.raises(BadSignatureError): + verifying_key.verify(sig, example_data, sigdecode=sigdecode_string) diff --git a/venv/lib/python3.10/site-packages/ecdsa/test_numbertheory.py b/venv/lib/python3.10/site-packages/ecdsa/test_numbertheory.py new file mode 100644 index 0000000..d0b5e4d --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/test_numbertheory.py @@ -0,0 +1,408 @@ +import operator +from functools import reduce + +try: + import unittest2 as unittest +except ImportError: + import unittest +import hypothesis.strategies as st +import pytest +from hypothesis import given, settings, example + +try: + from hypothesis import HealthCheck + + HC_PRESENT = True +except ImportError: # pragma: no cover + HC_PRESENT = False +from .numbertheory import ( + SquareRootError, + factorization, + gcd, + lcm, + jacobi, + inverse_mod, + is_prime, + next_prime, + smallprimes, + square_root_mod_prime, +) + + +BIGPRIMES = ( + 999671, + 999683, + 999721, + 999727, + 999749, + 999763, + 999769, + 999773, + 999809, + 999853, + 999863, + 999883, + 999907, + 999917, + 999931, + 999953, + 999959, + 999961, + 999979, + 999983, +) + + +@pytest.mark.parametrize( + "prime, next_p", [(p, q) for p, q in zip(BIGPRIMES[:-1], BIGPRIMES[1:])] +) +def test_next_prime(prime, next_p): + assert next_prime(prime) == next_p + + +@pytest.mark.parametrize("val", [-1, 0, 1]) +def test_next_prime_with_nums_less_2(val): + assert next_prime(val) == 2 + + +@pytest.mark.parametrize("prime", smallprimes) +def test_square_root_mod_prime_for_small_primes(prime): + squares = set() + for num in range(0, 1 + prime // 2): + sq = num * num % prime + squares.add(sq) + root = square_root_mod_prime(sq, prime) + # tested for real with TestNumbertheory.test_square_root_mod_prime + assert root * root % prime == sq + + for nonsquare in range(0, prime): + if nonsquare in squares: + continue + with pytest.raises(SquareRootError): + square_root_mod_prime(nonsquare, prime) + + +def test_square_root_mod_prime_for_2(): + a = square_root_mod_prime(1, 2) + assert a == 1 + + +def test_square_root_mod_prime_for_small_prime(): + root = square_root_mod_prime(98 ** 2 % 101, 101) + assert root * root % 101 == 9 + + +def test_square_root_mod_prime_for_p_congruent_5(): + p = 13 + assert p % 8 == 5 + + root = square_root_mod_prime(3, p) + assert root * root % p == 3 + + +def test_square_root_mod_prime_for_p_congruent_5_large_d(): + p = 29 + assert p % 8 == 5 + + root = square_root_mod_prime(4, p) + assert root * root % p == 4 + + +@st.composite +def st_two_nums_rel_prime(draw): + # 521-bit is the biggest curve we operate on, use 1024 for a bit + # of breathing space + mod = draw(st.integers(min_value=2, max_value=2 ** 1024)) + num = draw( + st.integers(min_value=1, max_value=mod - 1).filter( + lambda x: gcd(x, mod) == 1 + ) + ) + return num, mod + + +@st.composite +def st_primes(draw, *args, **kwargs): + if "min_value" not in kwargs: # pragma: no branch + kwargs["min_value"] = 1 + prime = draw( + st.sampled_from(smallprimes) + | st.integers(*args, **kwargs).filter(is_prime) + ) + return prime + + +@st.composite +def st_num_square_prime(draw): + prime = draw(st_primes(max_value=2 ** 1024)) + num = draw(st.integers(min_value=0, max_value=1 + prime // 2)) + sq = num * num % prime + return sq, prime + + +@st.composite +def st_comp_with_com_fac(draw): + """ + Strategy that returns lists of numbers, all having 
a common factor. + """ + primes = draw( + st.lists(st_primes(max_value=2 ** 512), min_size=1, max_size=10) + ) + # select random prime(s) that will make the common factor of composites + com_fac_primes = draw( + st.lists(st.sampled_from(primes), min_size=1, max_size=20) + ) + com_fac = reduce(operator.mul, com_fac_primes, 1) + + # select at most 20 lists (returned numbers), + # each having at most 30 primes (factors) including none (then the number + # will be 1) + comp_primes = draw( + st.integers(min_value=1, max_value=20).flatmap( + lambda n: st.lists( + st.lists(st.sampled_from(primes), max_size=30), + min_size=1, + max_size=n, + ) + ) + ) + + return [reduce(operator.mul, nums, 1) * com_fac for nums in comp_primes] + + +@st.composite +def st_comp_no_com_fac(draw): + """ + Strategy that returns lists of numbers that don't have a common factor. + """ + primes = draw( + st.lists( + st_primes(max_value=2 ** 512), min_size=2, max_size=10, unique=True + ) + ) + # first select the primes that will create the uncommon factor + # between returned numbers + uncom_fac_primes = draw( + st.lists( + st.sampled_from(primes), + min_size=1, + max_size=len(primes) - 1, + unique=True, + ) + ) + uncom_fac = reduce(operator.mul, uncom_fac_primes, 1) + + # then build composites from leftover primes + leftover_primes = [i for i in primes if i not in uncom_fac_primes] + + assert leftover_primes + assert uncom_fac_primes + + # select at most 20 lists, each having at most 30 primes + # selected from the leftover_primes list + number_primes = draw( + st.integers(min_value=1, max_value=20).flatmap( + lambda n: st.lists( + st.lists(st.sampled_from(leftover_primes), max_size=30), + min_size=1, + max_size=n, + ) + ) + ) + + numbers = [reduce(operator.mul, nums, 1) for nums in number_primes] + + insert_at = draw(st.integers(min_value=0, max_value=len(numbers))) + numbers.insert(insert_at, uncom_fac) + return numbers + + +HYP_SETTINGS = {} +if HC_PRESENT: # pragma: no branch + HYP_SETTINGS["suppress_health_check"] = [ + HealthCheck.filter_too_much, + HealthCheck.too_slow, + ] + # the factorization() sometimes takes a long time to finish + HYP_SETTINGS["deadline"] = 5000 + + +HYP_SLOW_SETTINGS = dict(HYP_SETTINGS) +HYP_SLOW_SETTINGS["max_examples"] = 10 + + +class TestIsPrime(unittest.TestCase): + def test_very_small_prime(self): + assert is_prime(23) + + def test_very_small_composite(self): + assert not is_prime(22) + + def test_small_prime(self): + assert is_prime(123456791) + + def test_special_composite(self): + assert not is_prime(10261) + + def test_medium_prime_1(self): + # nextPrime[2^256] + assert is_prime(2 ** 256 + 0x129) + + def test_medium_prime_2(self): + # nextPrime(2^256+0x129) + assert is_prime(2 ** 256 + 0x12D) + + def test_medium_trivial_composite(self): + assert not is_prime(2 ** 256 + 0x130) + + def test_medium_non_trivial_composite(self): + assert not is_prime(2 ** 256 + 0x12F) + + def test_large_prime(self): + # nextPrime[2^2048] + assert is_prime(2 ** 2048 + 0x3D5) + + +class TestNumbertheory(unittest.TestCase): + def test_gcd(self): + assert gcd(3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13) == 3 * 5 + assert gcd([3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13]) == 3 * 5 + assert gcd(3) == 3 + + @unittest.skipUnless( + HC_PRESENT, + "Hypothesis 2.0.0 can't be made tolerant of hard to " + "meet requirements (like `is_prime()`), the test " + "case times-out on it", + ) + @settings(**HYP_SLOW_SETTINGS) + @given(st_comp_with_com_fac()) + def test_gcd_with_com_factor(self, numbers): + n = gcd(numbers) + assert 1 in numbers 
or n != 1 + for i in numbers: + assert i % n == 0 + + @unittest.skipUnless( + HC_PRESENT, + "Hypothesis 2.0.0 can't be made tolerant of hard to " + "meet requirements (like `is_prime()`), the test " + "case times-out on it", + ) + @settings(**HYP_SLOW_SETTINGS) + @given(st_comp_no_com_fac()) + def test_gcd_with_uncom_factor(self, numbers): + n = gcd(numbers) + assert n == 1 + + @given( + st.lists( + st.integers(min_value=1, max_value=2 ** 8192), + min_size=1, + max_size=20, + ) + ) + def test_gcd_with_random_numbers(self, numbers): + n = gcd(numbers) + for i in numbers: + # check that at least it's a divider + assert i % n == 0 + + def test_lcm(self): + assert lcm(3, 5 * 3, 7 * 3) == 3 * 5 * 7 + assert lcm([3, 5 * 3, 7 * 3]) == 3 * 5 * 7 + assert lcm(3) == 3 + + @given( + st.lists( + st.integers(min_value=1, max_value=2 ** 8192), + min_size=1, + max_size=20, + ) + ) + def test_lcm_with_random_numbers(self, numbers): + n = lcm(numbers) + for i in numbers: + assert n % i == 0 + + @unittest.skipUnless( + HC_PRESENT, + "Hypothesis 2.0.0 can't be made tolerant of hard to " + "meet requirements (like `is_prime()`), the test " + "case times-out on it", + ) + @settings(**HYP_SETTINGS) + @given(st_num_square_prime()) + def test_square_root_mod_prime(self, vals): + square, prime = vals + + calc = square_root_mod_prime(square, prime) + assert calc * calc % prime == square + + @settings(**HYP_SETTINGS) + @given(st.integers(min_value=1, max_value=10 ** 12)) + @example(265399 * 1526929) + @example(373297 ** 2 * 553991) + def test_factorization(self, num): + factors = factorization(num) + mult = 1 + for i in factors: + mult *= i[0] ** i[1] + assert mult == num + + def test_factorisation_smallprimes(self): + exp = 101 * 103 + assert 101 in smallprimes + assert 103 in smallprimes + factors = factorization(exp) + mult = 1 + for i in factors: + mult *= i[0] ** i[1] + assert mult == exp + + def test_factorisation_not_smallprimes(self): + exp = 1231 * 1237 + assert 1231 not in smallprimes + assert 1237 not in smallprimes + factors = factorization(exp) + mult = 1 + for i in factors: + mult *= i[0] ** i[1] + assert mult == exp + + def test_jacobi_with_zero(self): + assert jacobi(0, 3) == 0 + + def test_jacobi_with_one(self): + assert jacobi(1, 3) == 1 + + @settings(**HYP_SETTINGS) + @given(st.integers(min_value=3, max_value=1000).filter(lambda x: x % 2)) + def test_jacobi(self, mod): + if is_prime(mod): + squares = set() + for root in range(1, mod): + assert jacobi(root * root, mod) == 1 + squares.add(root * root % mod) + for i in range(1, mod): + if i not in squares: + assert jacobi(i, mod) == -1 + else: + factors = factorization(mod) + for a in range(1, mod): + c = 1 + for i in factors: + c *= jacobi(a, i[0]) ** i[1] + assert c == jacobi(a, mod) + + @given(st_two_nums_rel_prime()) + def test_inverse_mod(self, nums): + num, mod = nums + + inv = inverse_mod(num, mod) + + assert 0 < inv < mod + assert num * inv % mod == 1 + + def test_inverse_mod_with_zero(self): + assert 0 == inverse_mod(0, 11) diff --git a/venv/lib/python3.10/site-packages/ecdsa/test_pyecdsa.py b/venv/lib/python3.10/site-packages/ecdsa/test_pyecdsa.py new file mode 100644 index 0000000..c1393bc --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/test_pyecdsa.py @@ -0,0 +1,2151 @@ +from __future__ import with_statement, division + +try: + import unittest2 as unittest +except ImportError: + import unittest +import os +import sys +import shutil +import subprocess +import pytest +from binascii import hexlify, unhexlify +from hashlib import 
sha1, sha256, sha384, sha512 +import hashlib +from functools import partial + +from hypothesis import given +import hypothesis.strategies as st + +from six import b, print_, binary_type +from .keys import SigningKey, VerifyingKey +from .keys import BadSignatureError, MalformedPointError, BadDigestError +from . import util +from .util import sigencode_der, sigencode_strings +from .util import sigdecode_der, sigdecode_strings +from .util import number_to_string, encoded_oid_ecPublicKey, MalformedSignature +from .curves import Curve, UnknownCurveError +from .curves import ( + SECP112r1, + SECP112r2, + SECP128r1, + SECP160r1, + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + SECP256k1, + BRAINPOOLP160r1, + BRAINPOOLP192r1, + BRAINPOOLP224r1, + BRAINPOOLP256r1, + BRAINPOOLP320r1, + BRAINPOOLP384r1, + BRAINPOOLP512r1, + curves, +) +from .ecdsa import ( + curve_brainpoolp224r1, + curve_brainpoolp256r1, + curve_brainpoolp384r1, + curve_brainpoolp512r1, +) +from .ellipticcurve import Point +from . import der +from . import rfc6979 +from . import ecdsa + + +class SubprocessError(Exception): + pass + + +def run_openssl(cmd): + OPENSSL = "openssl" + p = subprocess.Popen( + [OPENSSL] + cmd.split(), + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + stdout, ignored = p.communicate() + if p.returncode != 0: + raise SubprocessError( + "cmd '%s %s' failed: rc=%s, stdout/err was %s" + % (OPENSSL, cmd, p.returncode, stdout) + ) + return stdout.decode() + + +class ECDSA(unittest.TestCase): + def test_basic(self): + priv = SigningKey.generate() + pub = priv.get_verifying_key() + + data = b("blahblah") + sig = priv.sign(data) + + self.assertTrue(pub.verify(sig, data)) + self.assertRaises(BadSignatureError, pub.verify, sig, data + b("bad")) + + pub2 = VerifyingKey.from_string(pub.to_string()) + self.assertTrue(pub2.verify(sig, data)) + + def test_deterministic(self): + data = b("blahblah") + secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16) + + priv = SigningKey.from_secret_exponent(secexp, SECP256k1, sha256) + pub = priv.get_verifying_key() + + k = rfc6979.generate_k( + SECP256k1.generator.order(), secexp, sha256, sha256(data).digest() + ) + + sig1 = priv.sign(data, k=k) + self.assertTrue(pub.verify(sig1, data)) + + sig2 = priv.sign(data, k=k) + self.assertTrue(pub.verify(sig2, data)) + + sig3 = priv.sign_deterministic(data, sha256) + self.assertTrue(pub.verify(sig3, data)) + + self.assertEqual(sig1, sig2) + self.assertEqual(sig1, sig3) + + def test_bad_usage(self): + # sk=SigningKey() is wrong + self.assertRaises(TypeError, SigningKey) + self.assertRaises(TypeError, VerifyingKey) + + def test_lengths(self): + default = NIST192p + priv = SigningKey.generate() + pub = priv.get_verifying_key() + self.assertEqual(len(pub.to_string()), default.verifying_key_length) + sig = priv.sign(b("data")) + self.assertEqual(len(sig), default.signature_length) + for curve in ( + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + BRAINPOOLP160r1, + BRAINPOOLP192r1, + BRAINPOOLP224r1, + BRAINPOOLP256r1, + BRAINPOOLP320r1, + BRAINPOOLP384r1, + BRAINPOOLP512r1, + ): + priv = SigningKey.generate(curve=curve) + pub1 = priv.get_verifying_key() + pub2 = VerifyingKey.from_string(pub1.to_string(), curve) + self.assertEqual(pub1.to_string(), pub2.to_string()) + self.assertEqual(len(pub1.to_string()), curve.verifying_key_length) + sig = priv.sign(b("data")) + self.assertEqual(len(sig), curve.signature_length) + + def test_serialize(self): + seed = b("secret") + curve = NIST192p + secexp1 = 
util.randrange_from_seed__trytryagain(seed, curve.order) + secexp2 = util.randrange_from_seed__trytryagain(seed, curve.order) + self.assertEqual(secexp1, secexp2) + priv1 = SigningKey.from_secret_exponent(secexp1, curve) + priv2 = SigningKey.from_secret_exponent(secexp2, curve) + self.assertEqual( + hexlify(priv1.to_string()), hexlify(priv2.to_string()) + ) + self.assertEqual(priv1.to_pem(), priv2.to_pem()) + pub1 = priv1.get_verifying_key() + pub2 = priv2.get_verifying_key() + data = b("data") + sig1 = priv1.sign(data) + sig2 = priv2.sign(data) + self.assertTrue(pub1.verify(sig1, data)) + self.assertTrue(pub2.verify(sig1, data)) + self.assertTrue(pub1.verify(sig2, data)) + self.assertTrue(pub2.verify(sig2, data)) + self.assertEqual(hexlify(pub1.to_string()), hexlify(pub2.to_string())) + + def test_nonrandom(self): + s = b("all the entropy in the entire world, compressed into one line") + + def not_much_entropy(numbytes): + return s[:numbytes] + + # we control the entropy source, these two keys should be identical: + priv1 = SigningKey.generate(entropy=not_much_entropy) + priv2 = SigningKey.generate(entropy=not_much_entropy) + self.assertEqual( + hexlify(priv1.get_verifying_key().to_string()), + hexlify(priv2.get_verifying_key().to_string()), + ) + # likewise, signatures should be identical. Obviously you'd never + # want to do this with keys you care about, because the secrecy of + # the private key depends upon using different random numbers for + # each signature + sig1 = priv1.sign(b("data"), entropy=not_much_entropy) + sig2 = priv2.sign(b("data"), entropy=not_much_entropy) + self.assertEqual(hexlify(sig1), hexlify(sig2)) + + def assertTruePrivkeysEqual(self, priv1, priv2): + self.assertEqual( + priv1.privkey.secret_multiplier, priv2.privkey.secret_multiplier + ) + self.assertEqual( + priv1.privkey.public_key.generator, + priv2.privkey.public_key.generator, + ) + + def test_privkey_creation(self): + s = b("all the entropy in the entire world, compressed into one line") + + def not_much_entropy(numbytes): + return s[:numbytes] + + priv1 = SigningKey.generate() + self.assertEqual(priv1.baselen, NIST192p.baselen) + + priv1 = SigningKey.generate(curve=NIST224p) + self.assertEqual(priv1.baselen, NIST224p.baselen) + + priv1 = SigningKey.generate(entropy=not_much_entropy) + self.assertEqual(priv1.baselen, NIST192p.baselen) + priv2 = SigningKey.generate(entropy=not_much_entropy) + self.assertEqual(priv2.baselen, NIST192p.baselen) + self.assertTruePrivkeysEqual(priv1, priv2) + + priv1 = SigningKey.from_secret_exponent(secexp=3) + self.assertEqual(priv1.baselen, NIST192p.baselen) + priv2 = SigningKey.from_secret_exponent(secexp=3) + self.assertTruePrivkeysEqual(priv1, priv2) + + priv1 = SigningKey.from_secret_exponent(secexp=4, curve=NIST224p) + self.assertEqual(priv1.baselen, NIST224p.baselen) + + def test_privkey_strings(self): + priv1 = SigningKey.generate() + s1 = priv1.to_string() + self.assertEqual(type(s1), binary_type) + self.assertEqual(len(s1), NIST192p.baselen) + priv2 = SigningKey.from_string(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + s1 = priv1.to_pem() + self.assertEqual(type(s1), binary_type) + self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----"))) + self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----"))) + priv2 = SigningKey.from_pem(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + s1 = priv1.to_der() + self.assertEqual(type(s1), binary_type) + priv2 = SigningKey.from_der(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + priv1 = 
SigningKey.generate(curve=NIST256p) + s1 = priv1.to_pem() + self.assertEqual(type(s1), binary_type) + self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----"))) + self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----"))) + priv2 = SigningKey.from_pem(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + s1 = priv1.to_der() + self.assertEqual(type(s1), binary_type) + priv2 = SigningKey.from_der(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + def test_privkey_strings_brainpool(self): + priv1 = SigningKey.generate(curve=BRAINPOOLP512r1) + s1 = priv1.to_pem() + self.assertEqual(type(s1), binary_type) + self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----"))) + self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----"))) + priv2 = SigningKey.from_pem(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + s1 = priv1.to_der() + self.assertEqual(type(s1), binary_type) + priv2 = SigningKey.from_der(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + def assertTruePubkeysEqual(self, pub1, pub2): + self.assertEqual(pub1.pubkey.point, pub2.pubkey.point) + self.assertEqual(pub1.pubkey.generator, pub2.pubkey.generator) + self.assertEqual(pub1.curve, pub2.curve) + + def test_pubkey_strings(self): + priv1 = SigningKey.generate() + pub1 = priv1.get_verifying_key() + s1 = pub1.to_string() + self.assertEqual(type(s1), binary_type) + self.assertEqual(len(s1), NIST192p.verifying_key_length) + pub2 = VerifyingKey.from_string(s1) + self.assertTruePubkeysEqual(pub1, pub2) + + priv1 = SigningKey.generate(curve=NIST256p) + pub1 = priv1.get_verifying_key() + s1 = pub1.to_string() + self.assertEqual(type(s1), binary_type) + self.assertEqual(len(s1), NIST256p.verifying_key_length) + pub2 = VerifyingKey.from_string(s1, curve=NIST256p) + self.assertTruePubkeysEqual(pub1, pub2) + + pub1_der = pub1.to_der() + self.assertEqual(type(pub1_der), binary_type) + pub2 = VerifyingKey.from_der(pub1_der) + self.assertTruePubkeysEqual(pub1, pub2) + + self.assertRaises( + der.UnexpectedDER, VerifyingKey.from_der, pub1_der + b("junk") + ) + badpub = VerifyingKey.from_der(pub1_der) + + class FakeGenerator: + def order(self): + return 123456789 + + class FakeCurveFp: + def p(self): + return int( + "6525534529039240705020950546962731340" + "4541085228058844382513856749047873406763" + ) + + badcurve = Curve( + "unknown", FakeCurveFp(), FakeGenerator(), (1, 2, 3, 4, 5, 6), None + ) + badpub.curve = badcurve + badder = badpub.to_der() + self.assertRaises(UnknownCurveError, VerifyingKey.from_der, badder) + + pem = pub1.to_pem() + self.assertEqual(type(pem), binary_type) + self.assertTrue(pem.startswith(b("-----BEGIN PUBLIC KEY-----")), pem) + self.assertTrue( + pem.strip().endswith(b("-----END PUBLIC KEY-----")), pem + ) + pub2 = VerifyingKey.from_pem(pem) + self.assertTruePubkeysEqual(pub1, pub2) + + def test_pubkey_strings_brainpool(self): + priv1 = SigningKey.generate(curve=BRAINPOOLP512r1) + pub1 = priv1.get_verifying_key() + s1 = pub1.to_string() + self.assertEqual(type(s1), binary_type) + self.assertEqual(len(s1), BRAINPOOLP512r1.verifying_key_length) + pub2 = VerifyingKey.from_string(s1, curve=BRAINPOOLP512r1) + self.assertTruePubkeysEqual(pub1, pub2) + + pub1_der = pub1.to_der() + self.assertEqual(type(pub1_der), binary_type) + pub2 = VerifyingKey.from_der(pub1_der) + self.assertTruePubkeysEqual(pub1, pub2) + + def test_vk_to_der_with_invalid_point_encoding(self): + sk = SigningKey.generate() + vk = sk.verifying_key + + with self.assertRaises(ValueError): + vk.to_der("raw") + + def 
test_sk_to_der_with_invalid_point_encoding(self): + sk = SigningKey.generate() + + with self.assertRaises(ValueError): + sk.to_der("raw") + + def test_vk_from_der_garbage_after_curve_oid(self): + type_oid_der = encoded_oid_ecPublicKey + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + b( + "garbage" + ) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\x00\xff", None) + to_decode = der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(der.UnexpectedDER): + VerifyingKey.from_der(to_decode) + + def test_vk_from_der_invalid_key_type(self): + type_oid_der = der.encode_oid(*(1, 2, 3)) + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\x00\xff", None) + to_decode = der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(der.UnexpectedDER): + VerifyingKey.from_der(to_decode) + + def test_vk_from_der_garbage_after_point_string(self): + type_oid_der = encoded_oid_ecPublicKey + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\x00\xff", None) + b("garbage") + to_decode = der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(der.UnexpectedDER): + VerifyingKey.from_der(to_decode) + + def test_vk_from_der_invalid_bitstring(self): + type_oid_der = encoded_oid_ecPublicKey + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\x08\xff", None) + to_decode = der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(der.UnexpectedDER): + VerifyingKey.from_der(to_decode) + + def test_vk_from_der_with_invalid_length_of_encoding(self): + type_oid_der = encoded_oid_ecPublicKey + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\xff" * 64, 0) + to_decode = der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_der(to_decode) + + def test_vk_from_der_with_raw_encoding(self): + type_oid_der = encoded_oid_ecPublicKey + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\xff" * 48, 0) + to_decode = der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(der.UnexpectedDER): + VerifyingKey.from_der(to_decode) + + def test_signature_strings(self): + priv1 = SigningKey.generate() + pub1 = priv1.get_verifying_key() + data = b("data") + + sig = priv1.sign(data) + self.assertEqual(type(sig), binary_type) + self.assertEqual(len(sig), NIST192p.signature_length) + self.assertTrue(pub1.verify(sig, data)) + + sig = priv1.sign(data, sigencode=sigencode_strings) + self.assertEqual(type(sig), tuple) + self.assertEqual(len(sig), 2) + self.assertEqual(type(sig[0]), binary_type) + self.assertEqual(type(sig[1]), binary_type) + self.assertEqual(len(sig[0]), NIST192p.baselen) + self.assertEqual(len(sig[1]), NIST192p.baselen) + self.assertTrue(pub1.verify(sig, data, sigdecode=sigdecode_strings)) + + sig_der = priv1.sign(data, sigencode=sigencode_der) + self.assertEqual(type(sig_der), binary_type) + self.assertTrue(pub1.verify(sig_der, data, sigdecode=sigdecode_der)) + + def 
test_sig_decode_strings_with_invalid_count(self): + with self.assertRaises(MalformedSignature): + sigdecode_strings([b("one"), b("two"), b("three")], 0xFF) + + def test_sig_decode_strings_with_wrong_r_len(self): + with self.assertRaises(MalformedSignature): + sigdecode_strings([b("one"), b("two")], 0xFF) + + def test_sig_decode_strings_with_wrong_s_len(self): + with self.assertRaises(MalformedSignature): + sigdecode_strings([b("\xa0"), b("\xb0\xff")], 0xFF) + + def test_verify_with_too_long_input(self): + sk = SigningKey.generate() + vk = sk.verifying_key + + with self.assertRaises(BadDigestError): + vk.verify_digest(None, b("\x00") * 128) + + def test_sk_from_secret_exponent_with_wrong_sec_exponent(self): + with self.assertRaises(MalformedPointError): + SigningKey.from_secret_exponent(0) + + def test_sk_from_string_with_wrong_len_string(self): + with self.assertRaises(MalformedPointError): + SigningKey.from_string(b("\x01")) + + def test_sk_from_der_with_junk_after_sequence(self): + ver_der = der.encode_integer(1) + to_decode = der.encode_sequence(ver_der) + b("garbage") + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_der_with_wrong_version(self): + ver_der = der.encode_integer(0) + to_decode = der.encode_sequence(ver_der) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_der_invalid_const_tag(self): + ver_der = der.encode_integer(1) + privkey_der = der.encode_octet_string(b("\x00\xff")) + curve_oid_der = der.encode_oid(*(1, 2, 3)) + const_der = der.encode_constructed(1, curve_oid_der) + to_decode = der.encode_sequence( + ver_der, privkey_der, const_der, curve_oid_der + ) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_der_garbage_after_privkey_oid(self): + ver_der = der.encode_integer(1) + privkey_der = der.encode_octet_string(b("\x00\xff")) + curve_oid_der = der.encode_oid(*(1, 2, 3)) + b("garbage") + const_der = der.encode_constructed(0, curve_oid_der) + to_decode = der.encode_sequence( + ver_der, privkey_der, const_der, curve_oid_der + ) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_der_with_short_privkey(self): + ver_der = der.encode_integer(1) + privkey_der = der.encode_octet_string(b("\x00\xff")) + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + const_der = der.encode_constructed(0, curve_oid_der) + to_decode = der.encode_sequence( + ver_der, privkey_der, const_der, curve_oid_der + ) + + sk = SigningKey.from_der(to_decode) + self.assertEqual(sk.privkey.secret_multiplier, 255) + + def test_sk_from_p8_der_with_wrong_version(self): + ver_der = der.encode_integer(2) + algorithm_der = der.encode_sequence( + der.encode_oid(1, 2, 840, 10045, 2, 1), + der.encode_oid(1, 2, 840, 10045, 3, 1, 1), + ) + privkey_der = der.encode_octet_string( + der.encode_sequence( + der.encode_integer(1), der.encode_octet_string(b"\x00\xff") + ) + ) + to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_p8_der_with_wrong_algorithm(self): + ver_der = der.encode_integer(1) + algorithm_der = der.encode_sequence( + der.encode_oid(1, 2, 3), der.encode_oid(1, 2, 840, 10045, 3, 1, 1) + ) + privkey_der = der.encode_octet_string( + der.encode_sequence( + der.encode_integer(1), der.encode_octet_string(b"\x00\xff") + ) + ) + to_decode = der.encode_sequence(ver_der, algorithm_der, 
privkey_der) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_p8_der_with_trailing_junk_after_algorithm(self): + ver_der = der.encode_integer(1) + algorithm_der = der.encode_sequence( + der.encode_oid(1, 2, 840, 10045, 2, 1), + der.encode_oid(1, 2, 840, 10045, 3, 1, 1), + der.encode_octet_string(b"junk"), + ) + privkey_der = der.encode_octet_string( + der.encode_sequence( + der.encode_integer(1), der.encode_octet_string(b"\x00\xff") + ) + ) + to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_p8_der_with_trailing_junk_after_key(self): + ver_der = der.encode_integer(1) + algorithm_der = der.encode_sequence( + der.encode_oid(1, 2, 840, 10045, 2, 1), + der.encode_oid(1, 2, 840, 10045, 3, 1, 1), + ) + privkey_der = der.encode_octet_string( + der.encode_sequence( + der.encode_integer(1), der.encode_octet_string(b"\x00\xff") + ) + + der.encode_integer(999) + ) + to_decode = der.encode_sequence( + ver_der, + algorithm_der, + privkey_der, + der.encode_octet_string(b"junk"), + ) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sign_with_too_long_hash(self): + sk = SigningKey.from_secret_exponent(12) + + with self.assertRaises(BadDigestError): + sk.sign_digest(b("\xff") * 64) + + def test_hashfunc(self): + sk = SigningKey.generate(curve=NIST256p, hashfunc=sha256) + data = b("security level is 128 bits") + sig = sk.sign(data) + vk = VerifyingKey.from_string( + sk.get_verifying_key().to_string(), curve=NIST256p, hashfunc=sha256 + ) + self.assertTrue(vk.verify(sig, data)) + + sk2 = SigningKey.generate(curve=NIST256p) + sig2 = sk2.sign(data, hashfunc=sha256) + vk2 = VerifyingKey.from_string( + sk2.get_verifying_key().to_string(), + curve=NIST256p, + hashfunc=sha256, + ) + self.assertTrue(vk2.verify(sig2, data)) + + vk3 = VerifyingKey.from_string( + sk.get_verifying_key().to_string(), curve=NIST256p + ) + self.assertTrue(vk3.verify(sig, data, hashfunc=sha256)) + + def test_public_key_recovery(self): + # Create keys + curve = BRAINPOOLP160r1 + + sk = SigningKey.generate(curve=curve) + vk = sk.get_verifying_key() + + # Sign a message + data = b("blahblah") + signature = sk.sign(data) + + # Recover verifying keys + recovered_vks = VerifyingKey.from_public_key_recovery( + signature, data, curve + ) + + # Test if each pk is valid + for recovered_vk in recovered_vks: + # Test if recovered vk is valid for the data + self.assertTrue(recovered_vk.verify(signature, data)) + + # Test if properties are equal + self.assertEqual(vk.curve, recovered_vk.curve) + self.assertEqual( + vk.default_hashfunc, recovered_vk.default_hashfunc + ) + + # Test if original vk is the list of recovered keys + self.assertIn( + vk.pubkey.point, + [recovered_vk.pubkey.point for recovered_vk in recovered_vks], + ) + + def test_public_key_recovery_with_custom_hash(self): + # Create keys + curve = BRAINPOOLP160r1 + + sk = SigningKey.generate(curve=curve, hashfunc=sha256) + vk = sk.get_verifying_key() + + # Sign a message + data = b("blahblah") + signature = sk.sign(data) + + # Recover verifying keys + recovered_vks = VerifyingKey.from_public_key_recovery( + signature, data, curve, hashfunc=sha256, allow_truncate=True + ) + + # Test if each pk is valid + for recovered_vk in recovered_vks: + # Test if recovered vk is valid for the data + self.assertTrue(recovered_vk.verify(signature, data)) + + # Test if properties are equal + 
self.assertEqual(vk.curve, recovered_vk.curve) + self.assertEqual(sha256, recovered_vk.default_hashfunc) + + # Test if original vk is the list of recovered keys + self.assertIn( + vk.pubkey.point, + [recovered_vk.pubkey.point for recovered_vk in recovered_vks], + ) + + def test_encoding(self): + sk = SigningKey.from_secret_exponent(123456789) + vk = sk.verifying_key + + exp = b( + "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + self.assertEqual(vk.to_string(), exp) + self.assertEqual(vk.to_string("raw"), exp) + self.assertEqual(vk.to_string("uncompressed"), b("\x04") + exp) + self.assertEqual(vk.to_string("compressed"), b("\x02") + exp[:24]) + self.assertEqual(vk.to_string("hybrid"), b("\x06") + exp) + + def test_decoding(self): + sk = SigningKey.from_secret_exponent(123456789) + vk = sk.verifying_key + + enc = b( + "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + + from_raw = VerifyingKey.from_string(enc) + self.assertEqual(from_raw.pubkey.point, vk.pubkey.point) + + from_uncompressed = VerifyingKey.from_string(b("\x04") + enc) + self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point) + + from_compressed = VerifyingKey.from_string(b("\x02") + enc[:24]) + self.assertEqual(from_compressed.pubkey.point, vk.pubkey.point) + + from_uncompressed = VerifyingKey.from_string(b("\x06") + enc) + self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point) + + def test_uncompressed_decoding_as_only_alowed(self): + enc = b( + "\x04" + "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + vk = VerifyingKey.from_string(enc, valid_encodings=("uncompressed",)) + sk = SigningKey.from_secret_exponent(123456789) + + self.assertEqual(vk, sk.verifying_key) + + def test_raw_decoding_with_blocked_format(self): + enc = b( + "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) + + self.assertIn("hybrid", str(exp.exception)) + + def test_decoding_with_unknown_format(self): + with self.assertRaises(ValueError) as e: + VerifyingKey.from_string(b"", valid_encodings=("raw", "foobar")) + + self.assertIn("Only uncompressed, compressed", str(e.exception)) + + def test_uncompressed_decoding_with_blocked_format(self): + enc = b( + "\x04" + "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) + + self.assertIn("Invalid X9.62 encoding", str(exp.exception)) + + def test_hybrid_decoding_with_blocked_format(self): + enc = b( + "\x06" + "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("uncompressed",)) + + self.assertIn("Invalid X9.62 encoding", str(exp.exception)) + + def 
test_compressed_decoding_with_blocked_format(self): + enc = b( + "\x02" + "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + )[:25] + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("hybrid", "raw")) + + self.assertIn("(hybrid, raw)", str(exp.exception)) + + def test_decoding_with_malformed_uncompressed(self): + enc = b( + "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b("\x02") + enc) + + def test_decoding_with_malformed_compressed(self): + enc = b( + "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b("\x01") + enc[:24]) + + def test_decoding_with_inconsistent_hybrid(self): + enc = b( + "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b("\x07") + enc) + + def test_decoding_with_point_not_on_curve(self): + enc = b( + "\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + "\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + "z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(enc[:47] + b("\x00")) + + def test_decoding_with_point_at_infinity(self): + # decoding it is unsupported, as it's not necessary to encode it + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b("\x00")) + + def test_not_lying_on_curve(self): + enc = number_to_string(NIST192p.curve.p(), NIST192p.curve.p() + 1) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b("\x02") + enc) + + def test_from_string_with_invalid_curve_too_short_ver_key_len(self): + # both verifying_key_length and baselen are calculated internally + # by the Curve constructor, but since we depend on them verify + # that inconsistent values are detected + curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2)) + curve.verifying_key_length = 16 + curve.baselen = 32 + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b("\x00") * 16, curve) + + def test_from_string_with_invalid_curve_too_long_ver_key_len(self): + # both verifying_key_length and baselen are calculated internally + # by the Curve constructor, but since we depend on them verify + # that inconsistent values are detected + curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2)) + curve.verifying_key_length = 16 + curve.baselen = 16 + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b("\x00") * 16, curve) + + +@pytest.mark.parametrize( + "val,even", [(i, j) for i in range(256) for j in [True, False]] +) +def test_VerifyingKey_decode_with_small_values(val, even): + enc = number_to_string(val, NIST192p.order) + + if even: + enc = b("\x02") + enc + else: + enc = b("\x03") + enc + + # small values can both be actual valid public keys and not, verify that + # only expected exceptions are raised if they are not + try: + vk = VerifyingKey.from_string(enc) + assert 
isinstance(vk, VerifyingKey) + except MalformedPointError: + assert True + + +params = [] +for curve in curves: + for enc in ["raw", "uncompressed", "compressed", "hybrid"]: + params.append( + pytest.param(curve, enc, id="{0}-{1}".format(curve.name, enc)) + ) + + +@pytest.mark.parametrize("curve,encoding", params) +def test_VerifyingKey_encode_decode(curve, encoding): + sk = SigningKey.generate(curve=curve) + vk = sk.verifying_key + + encoded = vk.to_string(encoding) + + from_enc = VerifyingKey.from_string(encoded, curve=curve) + + assert vk.pubkey.point == from_enc.pubkey.point + + +class OpenSSL(unittest.TestCase): + # test interoperability with OpenSSL tools. Note that openssl's ECDSA + # sign/verify arguments changed between 0.9.8 and 1.0.0: the early + # versions require "-ecdsa-with-SHA1", the later versions want just + # "-SHA1" (or to leave out that argument entirely, which means the + # signature will use some default digest algorithm, probably determined + # by the key, probably always SHA1). + # + # openssl ecparam -name secp224r1 -genkey -out privkey.pem + # openssl ec -in privkey.pem -text -noout # get the priv/pub keys + # openssl dgst -ecdsa-with-SHA1 -sign privkey.pem -out data.sig data.txt + # openssl asn1parse -in data.sig -inform DER + # data.sig is 64 bytes, probably 56b plus ASN1 overhead + # openssl dgst -ecdsa-with-SHA1 -prverify privkey.pem -signature data.sig data.txt ; echo $? + # openssl ec -in privkey.pem -pubout -out pubkey.pem + # openssl ec -in privkey.pem -pubout -outform DER -out pubkey.der + + OPENSSL_SUPPORTED_CURVES = set( + c.split(":")[0].strip() + for c in run_openssl("ecparam -list_curves").split("\n") + ) + + def get_openssl_messagedigest_arg(self, hash_name): + v = run_openssl("version") + # e.g. "OpenSSL 1.0.0 29 Mar 2010", or "OpenSSL 1.0.0a 1 Jun 2010", + # or "OpenSSL 0.9.8o 01 Jun 2010" + vs = v.split()[1].split(".") + if vs >= ["1", "0", "0"]: # pragma: no cover + return "-{0}".format(hash_name) + else: # pragma: no cover + return "-ecdsa-with-{0}".format(hash_name) + + # sk: 1:OpenSSL->python 2:python->OpenSSL + # vk: 3:OpenSSL->python 4:python->OpenSSL + # sig: 5:OpenSSL->python 6:python->OpenSSL + + @pytest.mark.skipif( + "secp112r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r1", + ) + def test_from_openssl_secp112r1(self): + return self.do_test_from_openssl(SECP112r1) + + @pytest.mark.skipif( + "secp112r2" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r2", + ) + def test_from_openssl_secp112r2(self): + return self.do_test_from_openssl(SECP112r2) + + @pytest.mark.skipif( + "secp128r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp128r1", + ) + def test_from_openssl_secp128r1(self): + return self.do_test_from_openssl(SECP128r1) + + @pytest.mark.skipif( + "secp160r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp160r1", + ) + def test_from_openssl_secp160r1(self): + return self.do_test_from_openssl(SECP160r1) + + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_from_openssl_nist192p(self): + return self.do_test_from_openssl(NIST192p) + + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_from_openssl_nist192p_sha256(self): + return self.do_test_from_openssl(NIST192p, "SHA256") + + @pytest.mark.skipif( + "secp224r1" not in 
OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp224r1", + ) + def test_from_openssl_nist224p(self): + return self.do_test_from_openssl(NIST224p) + + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_from_openssl_nist256p(self): + return self.do_test_from_openssl(NIST256p) + + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_from_openssl_nist256p_sha384(self): + return self.do_test_from_openssl(NIST256p, "SHA384") + + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_from_openssl_nist256p_sha512(self): + return self.do_test_from_openssl(NIST256p, "SHA512") + + @pytest.mark.skipif( + "secp384r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp384r1", + ) + def test_from_openssl_nist384p(self): + return self.do_test_from_openssl(NIST384p) + + @pytest.mark.skipif( + "secp521r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp521r1", + ) + def test_from_openssl_nist521p(self): + return self.do_test_from_openssl(NIST521p) + + @pytest.mark.skipif( + "secp256k1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp256k1", + ) + def test_from_openssl_secp256k1(self): + return self.do_test_from_openssl(SECP256k1) + + @pytest.mark.skipif( + "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP160r1", + ) + def test_from_openssl_brainpoolp160r1(self): + return self.do_test_from_openssl(BRAINPOOLP160r1) + + @pytest.mark.skipif( + "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP192r1", + ) + def test_from_openssl_brainpoolp192r1(self): + return self.do_test_from_openssl(BRAINPOOLP192r1) + + @pytest.mark.skipif( + "brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP224r1", + ) + def test_from_openssl_brainpoolp224r1(self): + return self.do_test_from_openssl(BRAINPOOLP224r1) + + @pytest.mark.skipif( + "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP256r1", + ) + def test_from_openssl_brainpoolp256r1(self): + return self.do_test_from_openssl(BRAINPOOLP256r1) + + @pytest.mark.skipif( + "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP320r1", + ) + def test_from_openssl_brainpoolp320r1(self): + return self.do_test_from_openssl(BRAINPOOLP320r1) + + @pytest.mark.skipif( + "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP384r1", + ) + def test_from_openssl_brainpoolp384r1(self): + return self.do_test_from_openssl(BRAINPOOLP384r1) + + @pytest.mark.skipif( + "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP512r1", + ) + def test_from_openssl_brainpoolp512r1(self): + return self.do_test_from_openssl(BRAINPOOLP512r1) + + def do_test_from_openssl(self, curve, hash_name="SHA1"): + curvename = curve.openssl_name + assert curvename + # OpenSSL: create sk, vk, sign. 
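# In isolation, that OpenSSL -> Python direction looks roughly like the
# sketch below. It is illustrative only: it assumes an `openssl` binary
# on PATH, and the prime256v1 curve plus the key.pem/pub.pem/data.*
# file names are made up for the example, not taken from this test.

import subprocess
from hashlib import sha256

from ecdsa import VerifyingKey
from ecdsa.util import sigdecode_der

# have OpenSSL generate a key pair and sign a small message
subprocess.check_call(
    "openssl ecparam -name prime256v1 -genkey -out key.pem".split()
)
subprocess.check_call("openssl ec -in key.pem -pubout -out pub.pem".split())
with open("data.txt", "wb") as f:
    f.write(b"data")
subprocess.check_call(
    "openssl dgst -SHA256 -sign key.pem -out data.sig data.txt".split()
)

# read the OpenSSL artifacts back into python-ecdsa and verify
with open("pub.pem", "rb") as f:
    vk = VerifyingKey.from_pem(f.read())
with open("data.sig", "rb") as f:
    der_sig = f.read()
# OpenSSL writes DER-encoded ECDSA signatures, hence sigdecode_der
assert vk.verify(der_sig, b"data", hashfunc=sha256, sigdecode=sigdecode_der)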
+ # Python: read vk(3), checksig(5), read sk(1), sign, check + mdarg = self.get_openssl_messagedigest_arg(hash_name) + if os.path.isdir("t"): # pragma: no cover + shutil.rmtree("t") + os.mkdir("t") + run_openssl("ecparam -name %s -genkey -out t/privkey.pem" % curvename) + run_openssl("ec -in t/privkey.pem -pubout -out t/pubkey.pem") + data = b("data") + with open("t/data.txt", "wb") as e: + e.write(data) + run_openssl( + "dgst %s -sign t/privkey.pem -out t/data.sig t/data.txt" % mdarg + ) + run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig t/data.txt" + % mdarg + ) + with open("t/pubkey.pem", "rb") as e: + pubkey_pem = e.read() + vk = VerifyingKey.from_pem(pubkey_pem) # 3 + with open("t/data.sig", "rb") as e: + sig_der = e.read() + self.assertTrue( + vk.verify( + sig_der, + data, # 5 + hashfunc=partial(hashlib.new, hash_name), + sigdecode=sigdecode_der, + ) + ) + + with open("t/privkey.pem") as e: + fp = e.read() + sk = SigningKey.from_pem(fp) # 1 + sig = sk.sign(data, hashfunc=partial(hashlib.new, hash_name)) + self.assertTrue( + vk.verify(sig, data, hashfunc=partial(hashlib.new, hash_name)) + ) + + run_openssl( + "pkcs8 -topk8 -nocrypt " + "-in t/privkey.pem -outform pem -out t/privkey-p8.pem" + ) + with open("t/privkey-p8.pem", "rb") as e: + privkey_p8_pem = e.read() + sk_from_p8 = SigningKey.from_pem(privkey_p8_pem) + self.assertEqual(sk, sk_from_p8) + + @pytest.mark.skipif( + "secp112r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r1", + ) + def test_to_openssl_secp112r1(self): + self.do_test_to_openssl(SECP112r1) + + @pytest.mark.skipif( + "secp112r2" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r2", + ) + def test_to_openssl_secp112r2(self): + self.do_test_to_openssl(SECP112r2) + + @pytest.mark.skipif( + "secp128r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp128r1", + ) + def test_to_openssl_secp128r1(self): + self.do_test_to_openssl(SECP128r1) + + @pytest.mark.skipif( + "secp160r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp160r1", + ) + def test_to_openssl_secp160r1(self): + self.do_test_to_openssl(SECP160r1) + + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_to_openssl_nist192p(self): + self.do_test_to_openssl(NIST192p) + + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_to_openssl_nist192p_sha256(self): + self.do_test_to_openssl(NIST192p, "SHA256") + + @pytest.mark.skipif( + "secp224r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp224r1", + ) + def test_to_openssl_nist224p(self): + self.do_test_to_openssl(NIST224p) + + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_to_openssl_nist256p(self): + self.do_test_to_openssl(NIST256p) + + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_to_openssl_nist256p_sha384(self): + self.do_test_to_openssl(NIST256p, "SHA384") + + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_to_openssl_nist256p_sha512(self): + self.do_test_to_openssl(NIST256p, "SHA512") + + @pytest.mark.skipif( + "secp384r1" not in 
OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp384r1", + ) + def test_to_openssl_nist384p(self): + self.do_test_to_openssl(NIST384p) + + @pytest.mark.skipif( + "secp521r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp521r1", + ) + def test_to_openssl_nist521p(self): + self.do_test_to_openssl(NIST521p) + + @pytest.mark.skipif( + "secp256k1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp256k1", + ) + def test_to_openssl_secp256k1(self): + self.do_test_to_openssl(SECP256k1) + + @pytest.mark.skipif( + "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP160r1", + ) + def test_to_openssl_brainpoolp160r1(self): + self.do_test_to_openssl(BRAINPOOLP160r1) + + @pytest.mark.skipif( + "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP192r1", + ) + def test_to_openssl_brainpoolp192r1(self): + self.do_test_to_openssl(BRAINPOOLP192r1) + + @pytest.mark.skipif( + "brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP224r1", + ) + def test_to_openssl_brainpoolp224r1(self): + self.do_test_to_openssl(BRAINPOOLP224r1) + + @pytest.mark.skipif( + "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP256r1", + ) + def test_to_openssl_brainpoolp256r1(self): + self.do_test_to_openssl(BRAINPOOLP256r1) + + @pytest.mark.skipif( + "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP320r1", + ) + def test_to_openssl_brainpoolp320r1(self): + self.do_test_to_openssl(BRAINPOOLP320r1) + + @pytest.mark.skipif( + "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP384r1", + ) + def test_to_openssl_brainpoolp384r1(self): + self.do_test_to_openssl(BRAINPOOLP384r1) + + @pytest.mark.skipif( + "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP512r1", + ) + def test_to_openssl_brainpoolp512r1(self): + self.do_test_to_openssl(BRAINPOOLP512r1) + + def do_test_to_openssl(self, curve, hash_name="SHA1"): + curvename = curve.openssl_name + assert curvename + # Python: create sk, vk, sign. 
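# In isolation, the Python -> OpenSSL direction reduces to the sketch
# below (same caveats as the sketch above: an `openssl` binary on PATH
# is assumed, and the curve and file names are illustrative).

import subprocess
from hashlib import sha256

from ecdsa import SigningKey, NIST256p
from ecdsa.util import sigencode_der

# generate the key pair and a DER-encoded signature with python-ecdsa
sk = SigningKey.generate(curve=NIST256p)
with open("pub.pem", "wb") as f:
    f.write(sk.get_verifying_key().to_pem())
with open("data.txt", "wb") as f:
    f.write(b"data")
with open("data.sig", "wb") as f:
    f.write(sk.sign(b"data", hashfunc=sha256, sigencode=sigencode_der))

# `openssl dgst -verify` exits non-zero on a bad signature, which
# check_call surfaces as CalledProcessError
subprocess.check_call(
    "openssl dgst -SHA256 -verify pub.pem -signature data.sig data.txt".split()
)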
+ # OpenSSL: read vk(4), checksig(6), read sk(2), sign, check + mdarg = self.get_openssl_messagedigest_arg(hash_name) + if os.path.isdir("t"): # pragma: no cover + shutil.rmtree("t") + os.mkdir("t") + sk = SigningKey.generate(curve=curve) + vk = sk.get_verifying_key() + data = b("data") + with open("t/pubkey.der", "wb") as e: + e.write(vk.to_der()) # 4 + with open("t/pubkey.pem", "wb") as e: + e.write(vk.to_pem()) # 4 + sig_der = sk.sign( + data, + hashfunc=partial(hashlib.new, hash_name), + sigencode=sigencode_der, + ) + + with open("t/data.sig", "wb") as e: + e.write(sig_der) # 6 + with open("t/data.txt", "wb") as e: + e.write(data) + with open("t/baddata.txt", "wb") as e: + e.write(data + b("corrupt")) + + self.assertRaises( + SubprocessError, + run_openssl, + "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/baddata.txt" + % mdarg, + ) + run_openssl( + "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/data.txt" + % mdarg + ) + + with open("t/privkey.pem", "wb") as e: + e.write(sk.to_pem()) # 2 + run_openssl( + "dgst %s -sign t/privkey.pem -out t/data.sig2 t/data.txt" % mdarg + ) + run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" + % mdarg + ) + + with open("t/privkey-explicit.pem", "wb") as e: + e.write(sk.to_pem(curve_parameters_encoding="explicit")) + run_openssl( + "dgst %s -sign t/privkey-explicit.pem -out t/data.sig2 t/data.txt" + % mdarg + ) + run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" + % mdarg + ) + + with open("t/privkey-p8.pem", "wb") as e: + e.write(sk.to_pem(format="pkcs8")) + run_openssl( + "dgst %s -sign t/privkey-p8.pem -out t/data.sig3 t/data.txt" + % mdarg + ) + run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig3 t/data.txt" + % mdarg + ) + + with open("t/privkey-p8-explicit.pem", "wb") as e: + e.write( + sk.to_pem(format="pkcs8", curve_parameters_encoding="explicit") + ) + run_openssl( + "dgst %s -sign t/privkey-p8-explicit.pem -out t/data.sig3 t/data.txt" + % mdarg + ) + run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig3 t/data.txt" + % mdarg + ) + + +class TooSmallCurve(unittest.TestCase): + OPENSSL_SUPPORTED_CURVES = set( + c.split(":")[0].strip() + for c in run_openssl("ecparam -list_curves").split("\n") + ) + + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_sign_too_small_curve_dont_allow_truncate_raises(self): + sk = SigningKey.generate(curve=NIST192p) + data = b("data") + with self.assertRaises(BadDigestError): + sk.sign( + data, + hashfunc=partial(hashlib.new, "SHA256"), + sigencode=sigencode_der, + allow_truncate=False, + ) + + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_verify_too_small_curve_dont_allow_truncate_raises(self): + sk = SigningKey.generate(curve=NIST192p) + vk = sk.get_verifying_key() + data = b("data") + sig_der = sk.sign( + data, + hashfunc=partial(hashlib.new, "SHA256"), + sigencode=sigencode_der, + allow_truncate=True, + ) + with self.assertRaises(BadDigestError): + vk.verify( + sig_der, + data, + hashfunc=partial(hashlib.new, "SHA256"), + sigdecode=sigdecode_der, + allow_truncate=False, + ) + + +class DER(unittest.TestCase): + def test_integer(self): + self.assertEqual(der.encode_integer(0), b("\x02\x01\x00")) + self.assertEqual(der.encode_integer(1), b("\x02\x01\x01")) + self.assertEqual(der.encode_integer(127), 
b("\x02\x01\x7f")) + self.assertEqual(der.encode_integer(128), b("\x02\x02\x00\x80")) + self.assertEqual(der.encode_integer(256), b("\x02\x02\x01\x00")) + # self.assertEqual(der.encode_integer(-1), b("\x02\x01\xff")) + + def s(n): + return der.remove_integer(der.encode_integer(n) + b("junk")) + + self.assertEqual(s(0), (0, b("junk"))) + self.assertEqual(s(1), (1, b("junk"))) + self.assertEqual(s(127), (127, b("junk"))) + self.assertEqual(s(128), (128, b("junk"))) + self.assertEqual(s(256), (256, b("junk"))) + self.assertEqual( + s(1234567890123456789012345678901234567890), + (1234567890123456789012345678901234567890, b("junk")), + ) + + def test_number(self): + self.assertEqual(der.encode_number(0), b("\x00")) + self.assertEqual(der.encode_number(127), b("\x7f")) + self.assertEqual(der.encode_number(128), b("\x81\x00")) + self.assertEqual(der.encode_number(3 * 128 + 7), b("\x83\x07")) + # self.assertEqual(der.read_number("\x81\x9b" + "more"), (155, 2)) + # self.assertEqual(der.encode_number(155), b("\x81\x9b")) + for n in (0, 1, 2, 127, 128, 3 * 128 + 7, 840, 10045): # , 155): + x = der.encode_number(n) + b("more") + n1, llen = der.read_number(x) + self.assertEqual(n1, n) + self.assertEqual(x[llen:], b("more")) + + def test_length(self): + self.assertEqual(der.encode_length(0), b("\x00")) + self.assertEqual(der.encode_length(127), b("\x7f")) + self.assertEqual(der.encode_length(128), b("\x81\x80")) + self.assertEqual(der.encode_length(255), b("\x81\xff")) + self.assertEqual(der.encode_length(256), b("\x82\x01\x00")) + self.assertEqual(der.encode_length(3 * 256 + 7), b("\x82\x03\x07")) + self.assertEqual(der.read_length(b("\x81\x9b") + b("more")), (155, 2)) + self.assertEqual(der.encode_length(155), b("\x81\x9b")) + for n in (0, 1, 2, 127, 128, 255, 256, 3 * 256 + 7, 155): + x = der.encode_length(n) + b("more") + n1, llen = der.read_length(x) + self.assertEqual(n1, n) + self.assertEqual(x[llen:], b("more")) + + def test_sequence(self): + x = der.encode_sequence(b("ABC"), b("DEF")) + b("GHI") + self.assertEqual(x, b("\x30\x06ABCDEFGHI")) + x1, rest = der.remove_sequence(x) + self.assertEqual(x1, b("ABCDEF")) + self.assertEqual(rest, b("GHI")) + + def test_constructed(self): + x = der.encode_constructed(0, NIST224p.encoded_oid) + self.assertEqual(hexlify(x), b("a007") + b("06052b81040021")) + x = der.encode_constructed(1, unhexlify(b("0102030a0b0c"))) + self.assertEqual(hexlify(x), b("a106") + b("0102030a0b0c")) + + +class Util(unittest.TestCase): + def test_trytryagain(self): + tta = util.randrange_from_seed__trytryagain + for i in range(1000): + seed = "seed-%d" % i + for order in ( + 2 ** 8 - 2, + 2 ** 8 - 1, + 2 ** 8, + 2 ** 8 + 1, + 2 ** 8 + 2, + 2 ** 16 - 1, + 2 ** 16 + 1, + ): + n = tta(seed, order) + self.assertTrue(1 <= n < order, (1, n, order)) + # this trytryagain *does* provide long-term stability + self.assertEqual( + ("%x" % (tta("seed", NIST224p.order))).encode(), + b("6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc"), + ) + + def test_trytryagain_single(self): + tta = util.randrange_from_seed__trytryagain + order = 2 ** 8 - 2 + seed = b"text" + n = tta(seed, order) + # known issue: https://github.com/warner/python-ecdsa/issues/221 + if sys.version_info < (3, 0): # pragma: no branch + self.assertEqual(n, 228) + else: + self.assertEqual(n, 18) + + @given(st.integers(min_value=0, max_value=10 ** 200)) + def test_randrange(self, i): + # util.randrange does not provide long-term stability: we might + # change the algorithm in the future. 
+ entropy = util.PRNG("seed-%d" % i) + for order in ( + 2 ** 8 - 2, + 2 ** 8 - 1, + 2 ** 8, + 2 ** 16 - 1, + 2 ** 16 + 1, + ): + # that oddball 2**16+1 takes half our runtime + n = util.randrange(order, entropy=entropy) + self.assertTrue(1 <= n < order, (1, n, order)) + + def OFF_test_prove_uniformity(self): # pragma: no cover + order = 2 ** 8 - 2 + counts = dict([(i, 0) for i in range(1, order)]) + assert 0 not in counts + assert order not in counts + for i in range(1000000): + seed = "seed-%d" % i + n = util.randrange_from_seed__trytryagain(seed, order) + counts[n] += 1 + # this technique should use the full range + self.assertTrue(counts[order - 1]) + for i in range(1, order): + print_("%3d: %s" % (i, "*" * (counts[i] // 100))) + + +class RFC6979(unittest.TestCase): + # https://tools.ietf.org/html/rfc6979#appendix-A.1 + def _do(self, generator, secexp, hsh, hash_func, expected): + actual = rfc6979.generate_k(generator.order(), secexp, hash_func, hsh) + self.assertEqual(expected, actual) + + def test_SECP256k1(self): + """RFC doesn't contain test vectors for SECP256k1 used in bitcoin. + This vector has been computed by Golang reference implementation instead.""" + self._do( + generator=SECP256k1.generator, + secexp=int("9d0219792467d7d37b4d43298a7d0c05", 16), + hsh=sha256(b("sample")).digest(), + hash_func=sha256, + expected=int( + "8fa1f95d514760e498f28957b824ee6ec39ed64826ff4fecc2b5739ec45b91cd", + 16, + ), + ) + + def test_SECP256k1_2(self): + self._do( + generator=SECP256k1.generator, + secexp=int( + "cca9fbcc1b41e5a95d369eaa6ddcff73b61a4efaa279cfc6567e8daa39cbaf50", + 16, + ), + hsh=sha256(b("sample")).digest(), + hash_func=sha256, + expected=int( + "2df40ca70e639d89528a6b670d9d48d9165fdc0febc0974056bdce192b8e16a3", + 16, + ), + ) + + def test_SECP256k1_3(self): + self._do( + generator=SECP256k1.generator, + secexp=0x1, + hsh=sha256(b("Satoshi Nakamoto")).digest(), + hash_func=sha256, + expected=0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15, + ) + + def test_SECP256k1_4(self): + self._do( + generator=SECP256k1.generator, + secexp=0x1, + hsh=sha256( + b( + "All those moments will be lost in time, like tears in rain. Time to die..." 
+ ) + ).digest(), + hash_func=sha256, + expected=0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3, + ) + + def test_SECP256k1_5(self): + self._do( + generator=SECP256k1.generator, + secexp=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140, + hsh=sha256(b("Satoshi Nakamoto")).digest(), + hash_func=sha256, + expected=0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90, + ) + + def test_SECP256k1_6(self): + self._do( + generator=SECP256k1.generator, + secexp=0xF8B8AF8CE3C7CCA5E300D33939540C10D45CE001B8F252BFBC57BA0342904181, + hsh=sha256(b("Alan Turing")).digest(), + hash_func=sha256, + expected=0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1, + ) + + def test_1(self): + # Basic example of the RFC, it also tests 'try-try-again' from Step H of rfc6979 + self._do( + generator=Point( + None, + 0, + 0, + int("4000000000000000000020108A2E0CC0D99F8A5EF", 16), + ), + secexp=int("09A4D6792295A7F730FC3F2B49CBC0F62E862272F", 16), + hsh=unhexlify( + b( + "AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF" + ) + ), + hash_func=sha256, + expected=int("23AF4074C90A02B3FE61D286D5C87F425E6BDD81B", 16), + ) + + def test_2(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=sha1(b("sample")).digest(), + hash_func=sha1, + expected=int( + "37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", 16 + ), + ) + + def test_3(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=sha256(b("sample")).digest(), + hash_func=sha256, + expected=int( + "32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16 + ), + ) + + def test_4(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=sha512(b("sample")).digest(), + hash_func=sha512, + expected=int( + "A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", 16 + ), + ) + + def test_5(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=sha1(b("test")).digest(), + hash_func=sha1, + expected=int( + "D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", 16 + ), + ) + + def test_6(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=sha256(b("test")).digest(), + hash_func=sha256, + expected=int( + "5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", 16 + ), + ) + + def test_7(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=sha512(b("test")).digest(), + hash_func=sha512, + expected=int( + "0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", 16 + ), + ) + + def test_8(self): + self._do( + generator=NIST521p.generator, + secexp=int( + "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", + 16, + ), + hsh=sha1(b("sample")).digest(), + hash_func=sha1, + expected=int( + "089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", + 16, + ), + ) + + def test_9(self): + self._do( + generator=NIST521p.generator, + secexp=int( + "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", + 16, + ), + hsh=sha256(b("sample")).digest(), + hash_func=sha256, + 
expected=int( + "0EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", + 16, + ), + ) + + def test_10(self): + self._do( + generator=NIST521p.generator, + secexp=int( + "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", + 16, + ), + hsh=sha512(b("test")).digest(), + hash_func=sha512, + expected=int( + "16200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", + 16, + ), + ) + + +class ECDH(unittest.TestCase): + def _do(self, curve, generator, dA, x_qA, y_qA, dB, x_qB, y_qB, x_Z, y_Z): + qA = dA * generator + qB = dB * generator + Z = dA * qB + self.assertEqual(Point(curve, x_qA, y_qA), qA) + self.assertEqual(Point(curve, x_qB, y_qB), qB) + self.assertTrue( + (dA * qB) + == (dA * dB * generator) + == (dB * dA * generator) + == (dB * qA) + ) + self.assertEqual(Point(curve, x_Z, y_Z), Z) + + +class RFC6932(ECDH): + # https://tools.ietf.org/html/rfc6932#appendix-A.1 + + def test_brainpoolP224r1(self): + self._do( + curve=curve_brainpoolp224r1, + generator=BRAINPOOLP224r1.generator, + dA=int( + "7C4B7A2C8A4BAD1FBB7D79CC0955DB7C6A4660CA64CC4778159B495E", 16 + ), + x_qA=int( + "B104A67A6F6E85E14EC1825E1539E8ECDBBF584922367DD88C6BDCF2", 16 + ), + y_qA=int( + "46D782E7FDB5F60CD8404301AC5949C58EDB26BC68BA07695B750A94", 16 + ), + dB=int( + "63976D4AAE6CD0F6DD18DEFEF55D96569D0507C03E74D6486FFA28FB", 16 + ), + x_qB=int( + "2A97089A9296147B71B21A4B574E1278245B536F14D8C2B9D07A874E", 16 + ), + y_qB=int( + "9B900D7C77A709A797276B8CA1BA61BB95B546FC29F862E44D59D25B", 16 + ), + x_Z=int( + "312DFD98783F9FB77B9704945A73BEB6DCCBE3B65D0F967DCAB574EB", 16 + ), + y_Z=int( + "6F800811D64114B1C48C621AB3357CF93F496E4238696A2A012B3C98", 16 + ), + ) + + def test_brainpoolP256r1(self): + self._do( + curve=curve_brainpoolp256r1, + generator=BRAINPOOLP256r1.generator, + dA=int( + "041EB8B1E2BC681BCE8E39963B2E9FC415B05283313DD1A8BCC055F11AE" + "49699", + 16, + ), + x_qA=int( + "78028496B5ECAAB3C8B6C12E45DB1E02C9E4D26B4113BC4F015F60C5C" + "CC0D206", + 16, + ), + y_qA=int( + "A2AE1762A3831C1D20F03F8D1E3C0C39AFE6F09B4D44BBE80CD100987" + "B05F92B", + 16, + ), + dB=int( + "06F5240EACDB9837BC96D48274C8AA834B6C87BA9CC3EEDD81F99A16B8D" + "804D3", + 16, + ), + x_qB=int( + "8E07E219BA588916C5B06AA30A2F464C2F2ACFC1610A3BE2FB240B635" + "341F0DB", + 16, + ), + y_qB=int( + "148EA1D7D1E7E54B9555B6C9AC90629C18B63BEE5D7AA6949EBBF47B2" + "4FDE40D", + 16, + ), + x_Z=int( + "05E940915549E9F6A4A75693716E37466ABA79B4BF2919877A16DD2CC2" + "E23708", + 16, + ), + y_Z=int( + "6BC23B6702BC5A019438CEEA107DAAD8B94232FFBBC350F3B137628FE6" + "FD134C", + 16, + ), + ) + + def test_brainpoolP384r1(self): + self._do( + curve=curve_brainpoolp384r1, + generator=BRAINPOOLP384r1.generator, + dA=int( + "014EC0755B78594BA47FB0A56F6173045B4331E74BA1A6F47322E70D79D" + "828D97E095884CA72B73FDABD5910DF0FA76A", + 16, + ), + x_qA=int( + "45CB26E4384DAF6FB776885307B9A38B7AD1B5C692E0C32F012533277" + "8F3B8D3F50CA358099B30DEB5EE69A95C058B4E", + 16, + ), + y_qA=int( + "8173A1C54AFFA7E781D0E1E1D12C0DC2B74F4DF58E4A4E3AF7026C5D3" + "2DC530A2CD89C859BB4B4B768497F49AB8CC859", + 16, + ), + dB=int( + "6B461CB79BD0EA519A87D6828815D8CE7CD9B3CAA0B5A8262CBCD550A01" + "5C90095B976F3529957506E1224A861711D54", + 16, + ), + x_qB=int( + "01BF92A92EE4BE8DED1A911125C209B03F99E3161CFCC986DC7711383" + "FC30AF9CE28CA3386D59E2C8D72CE1E7B4666E8", + 16, + ), + 
y_qB=int( + "3289C4A3A4FEE035E39BDB885D509D224A142FF9FBCC5CFE5CCBB3026" + "8EE47487ED8044858D31D848F7A95C635A347AC", + 16, + ), + x_Z=int( + "04CC4FF3DCCCB07AF24E0ACC529955B36D7C807772B92FCBE48F3AFE9A" + "2F370A1F98D3FA73FD0C0747C632E12F1423EC", + 16, + ), + y_Z=int( + "7F465F90BD69AFB8F828A214EB9716D66ABC59F17AF7C75EE7F1DE22AB" + "5D05085F5A01A9382D05BF72D96698FE3FF64E", + 16, + ), + ) + + def test_brainpoolP512r1(self): + self._do( + curve=curve_brainpoolp512r1, + generator=BRAINPOOLP512r1.generator, + dA=int( + "636B6BE0482A6C1C41AA7AE7B245E983392DB94CECEA2660A379CFE1595" + "59E357581825391175FC195D28BAC0CF03A7841A383B95C262B98378287" + "4CCE6FE333", + 16, + ), + x_qA=int( + "0562E68B9AF7CBFD5565C6B16883B777FF11C199161ECC427A39D17EC" + "2166499389571D6A994977C56AD8252658BA8A1B72AE42F4FB7532151" + "AFC3EF0971CCDA", + 16, + ), + y_qA=int( + "A7CA2D8191E21776A89860AFBC1F582FAA308D551C1DC6133AF9F9C3C" + "AD59998D70079548140B90B1F311AFB378AA81F51B275B2BE6B7DEE97" + "8EFC7343EA642E", + 16, + ), + dB=int( + "0AF4E7F6D52EDD52907BB8DBAB3992A0BB696EC10DF11892FF205B66D38" + "1ECE72314E6A6EA079CEA06961DBA5AE6422EF2E9EE803A1F236FB96A17" + "99B86E5C8B", + 16, + ), + x_qB=int( + "5A7954E32663DFF11AE24712D87419F26B708AC2B92877D6BFEE2BFC4" + "3714D89BBDB6D24D807BBD3AEB7F0C325F862E8BADE4F74636B97EAAC" + "E739E11720D323", + 16, + ), + y_qB=int( + "96D14621A9283A1BED84DE8DD64836B2C0758B11441179DC0C54C0D49" + "A47C03807D171DD544B72CAAEF7B7CE01C7753E2CAD1A861ECA55A719" + "54EE1BA35E04BE", + 16, + ), + x_Z=int( + "1EE8321A4BBF93B9CF8921AB209850EC9B7066D1984EF08C2BB7232362" + "08AC8F1A483E79461A00E0D5F6921CE9D360502F85C812BEDEE23AC5B2" + "10E5811B191E", + 16, + ), + y_Z=int( + "2632095B7B936174B41FD2FAF369B1D18DCADEED7E410A7E251F083109" + "7C50D02CFED02607B6A2D5ADB4C0006008562208631875B58B54ECDA5A" + "4F9FE9EAABA6", + 16, + ), + ) + + +class RFC7027(ECDH): + # https://tools.ietf.org/html/rfc7027#appendix-A + + def test_brainpoolP256r1(self): + self._do( + curve=curve_brainpoolp256r1, + generator=BRAINPOOLP256r1.generator, + dA=int( + "81DB1EE100150FF2EA338D708271BE38300CB54241D79950F77B0630398" + "04F1D", + 16, + ), + x_qA=int( + "44106E913F92BC02A1705D9953A8414DB95E1AAA49E81D9E85F929A8E" + "3100BE5", + 16, + ), + y_qA=int( + "8AB4846F11CACCB73CE49CBDD120F5A900A69FD32C272223F789EF10E" + "B089BDC", + 16, + ), + dB=int( + "55E40BC41E37E3E2AD25C3C6654511FFA8474A91A0032087593852D3E7D" + "76BD3", + 16, + ), + x_qB=int( + "8D2D688C6CF93E1160AD04CC4429117DC2C41825E1E9FCA0ADDD34E6F" + "1B39F7B", + 16, + ), + y_qB=int( + "990C57520812BE512641E47034832106BC7D3E8DD0E4C7F1136D70065" + "47CEC6A", + 16, + ), + x_Z=int( + "89AFC39D41D3B327814B80940B042590F96556EC91E6AE7939BCE31F3A" + "18BF2B", + 16, + ), + y_Z=int( + "49C27868F4ECA2179BFD7D59B1E3BF34C1DBDE61AE12931648F43E5963" + "2504DE", + 16, + ), + ) + + def test_brainpoolP384r1(self): + self._do( + curve=curve_brainpoolp384r1, + generator=BRAINPOOLP384r1.generator, + dA=int( + "1E20F5E048A5886F1F157C74E91BDE2B98C8B52D58E5003D57053FC4B0B" + "D65D6F15EB5D1EE1610DF870795143627D042", + 16, + ), + x_qA=int( + "68B665DD91C195800650CDD363C625F4E742E8134667B767B1B476793" + "588F885AB698C852D4A6E77A252D6380FCAF068", + 16, + ), + y_qA=int( + "55BC91A39C9EC01DEE36017B7D673A931236D2F1F5C83942D049E3FA2" + "0607493E0D038FF2FD30C2AB67D15C85F7FAA59", + 16, + ), + dB=int( + "032640BC6003C59260F7250C3DB58CE647F98E1260ACCE4ACDA3DD869F7" + "4E01F8BA5E0324309DB6A9831497ABAC96670", + 16, + ), + x_qB=int( + "4D44326F269A597A5B58BBA565DA5556ED7FD9A8A9EB76C25F46DB69D" + 
"19DC8CE6AD18E404B15738B2086DF37E71D1EB4", + 16, + ), + y_qB=int( + "62D692136DE56CBE93BF5FA3188EF58BC8A3A0EC6C1E151A21038A42E" + "9185329B5B275903D192F8D4E1F32FE9CC78C48", + 16, + ), + x_Z=int( + "0BD9D3A7EA0B3D519D09D8E48D0785FB744A6B355E6304BC51C229FBBC" + "E239BBADF6403715C35D4FB2A5444F575D4F42", + 16, + ), + y_Z=int( + "0DF213417EBE4D8E40A5F76F66C56470C489A3478D146DECF6DF0D94BA" + "E9E598157290F8756066975F1DB34B2324B7BD", + 16, + ), + ) + + def test_brainpoolP512r1(self): + self._do( + curve=curve_brainpoolp512r1, + generator=BRAINPOOLP512r1.generator, + dA=int( + "16302FF0DBBB5A8D733DAB7141C1B45ACBC8715939677F6A56850A38BD8" + "7BD59B09E80279609FF333EB9D4C061231FB26F92EEB04982A5F1D1764C" + "AD57665422", + 16, + ), + x_qA=int( + "0A420517E406AAC0ACDCE90FCD71487718D3B953EFD7FBEC5F7F27E28" + "C6149999397E91E029E06457DB2D3E640668B392C2A7E737A7F0BF044" + "36D11640FD09FD", + 16, + ), + y_qA=int( + "72E6882E8DB28AAD36237CD25D580DB23783961C8DC52DFA2EC138AD4" + "72A0FCEF3887CF62B623B2A87DE5C588301EA3E5FC269B373B60724F5" + "E82A6AD147FDE7", + 16, + ), + dB=int( + "230E18E1BCC88A362FA54E4EA3902009292F7F8033624FD471B5D8ACE49" + "D12CFABBC19963DAB8E2F1EBA00BFFB29E4D72D13F2224562F405CB8050" + "3666B25429", + 16, + ), + x_qB=int( + "9D45F66DE5D67E2E6DB6E93A59CE0BB48106097FF78A081DE781CDB31" + "FCE8CCBAAEA8DD4320C4119F1E9CD437A2EAB3731FA9668AB268D871D" + "EDA55A5473199F", + 16, + ), + y_qB=int( + "2FDC313095BCDD5FB3A91636F07A959C8E86B5636A1E930E8396049CB" + "481961D365CC11453A06C719835475B12CB52FC3C383BCE35E27EF194" + "512B71876285FA", + 16, + ), + x_Z=int( + "A7927098655F1F9976FA50A9D566865DC530331846381C87256BAF3226" + "244B76D36403C024D7BBF0AA0803EAFF405D3D24F11A9B5C0BEF679FE1" + "454B21C4CD1F", + 16, + ), + y_Z=int( + "7DB71C3DEF63212841C463E881BDCF055523BD368240E6C3143BD8DEF8" + "B3B3223B95E0F53082FF5E412F4222537A43DF1C6D25729DDB51620A83" + "2BE6A26680A2", + 16, + ), + ) + + +# https://tools.ietf.org/html/rfc4754#page-5 +@pytest.mark.parametrize( + "w, gwx, gwy, k, msg, md, r, s, curve", + [ + pytest.param( + "DC51D3866A15BACDE33D96F992FCA99DA7E6EF0934E7097559C27F1614C88A7F", + "2442A5CC0ECD015FA3CA31DC8E2BBC70BF42D60CBCA20085E0822CB04235E970", + "6FC98BD7E50211A4A27102FA3549DF79EBCB4BF246B80945CDDFE7D509BBFD7D", + "9E56F509196784D963D1C0A401510EE7ADA3DCC5DEE04B154BF61AF1D5A6DECE", + b"abc", + sha256, + "CB28E0999B9C7715FD0A80D8E47A77079716CBBF917DD72E97566EA1C066957C", + "86FA3BB4E26CAD5BF90B7F81899256CE7594BB1EA0C89212748BFF3B3D5B0315", + NIST256p, + id="ECDSA-256", + ), + pytest.param( + "0BEB646634BA87735D77AE4809A0EBEA865535DE4C1E1DCB692E84708E81A5AF" + "62E528C38B2A81B35309668D73524D9F", + "96281BF8DD5E0525CA049C048D345D3082968D10FEDF5C5ACA0C64E6465A97EA" + "5CE10C9DFEC21797415710721F437922", + "447688BA94708EB6E2E4D59F6AB6D7EDFF9301D249FE49C33096655F5D502FAD" + "3D383B91C5E7EDAA2B714CC99D5743CA", + "B4B74E44D71A13D568003D7489908D564C7761E229C58CBFA18950096EB7463B" + "854D7FA992F934D927376285E63414FA", + b"abc", + sha384, + "FB017B914E29149432D8BAC29A514640B46F53DDAB2C69948084E2930F1C8F7E" + "08E07C9C63F2D21A07DCB56A6AF56EB3", + "B263A1305E057F984D38726A1B46874109F417BCA112674C528262A40A629AF1" + "CBB9F516CE0FA7D2FF630863A00E8B9F", + NIST384p, + id="ECDSA-384", + ), + pytest.param( + "0065FDA3409451DCAB0A0EAD45495112A3D813C17BFD34BDF8C1209D7DF58491" + "20597779060A7FF9D704ADF78B570FFAD6F062E95C7E0C5D5481C5B153B48B37" + "5FA1", + "0151518F1AF0F563517EDD5485190DF95A4BF57B5CBA4CF2A9A3F6474725A35F" + "7AFE0A6DDEB8BEDBCD6A197E592D40188901CECD650699C9B5E456AEA5ADD190" + "52A8", + 
"006F3B142EA1BFFF7E2837AD44C9E4FF6D2D34C73184BBAD90026DD5E6E85317" + "D9DF45CAD7803C6C20035B2F3FF63AFF4E1BA64D1C077577DA3F4286C58F0AEA" + "E643", + "00C1C2B305419F5A41344D7E4359933D734096F556197A9B244342B8B62F46F9" + "373778F9DE6B6497B1EF825FF24F42F9B4A4BD7382CFC3378A540B1B7F0C1B95" + "6C2F", + b"abc", + sha512, + "0154FD3836AF92D0DCA57DD5341D3053988534FDE8318FC6AAAAB68E2E6F4339" + "B19F2F281A7E0B22C269D93CF8794A9278880ED7DBB8D9362CAEACEE54432055" + "2251", + "017705A7030290D1CEB605A9A1BB03FF9CDD521E87A696EC926C8C10C8362DF4" + "975367101F67D1CF9BCCBF2F3D239534FA509E70AAC851AE01AAC68D62F86647" + "2660", + NIST521p, + id="ECDSA-521", + ), + ], +) +def test_RFC4754_vectors(w, gwx, gwy, k, msg, md, r, s, curve): + sk = SigningKey.from_string(unhexlify(w), curve) + vk = VerifyingKey.from_string(unhexlify(gwx + gwy), curve) + assert sk.verifying_key == vk + sig = sk.sign(msg, hashfunc=md, sigencode=sigencode_strings, k=int(k, 16)) + + assert sig == (unhexlify(r), unhexlify(s)) + + assert vk.verify(sig, msg, md, sigdecode_strings) diff --git a/venv/lib/python3.10/site-packages/ecdsa/test_rw_lock.py b/venv/lib/python3.10/site-packages/ecdsa/test_rw_lock.py new file mode 100644 index 0000000..0a84b9c --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/test_rw_lock.py @@ -0,0 +1,180 @@ +# Copyright Mateusz Kobos, (c) 2011 +# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/ +# released under the MIT licence + +try: + import unittest2 as unittest +except ImportError: + import unittest +import threading +import time +import copy +from ._rwlock import RWLock + + +class Writer(threading.Thread): + def __init__( + self, buffer_, rw_lock, init_sleep_time, sleep_time, to_write + ): + """ + @param buffer_: common buffer_ shared by the readers and writers + @type buffer_: list + @type rw_lock: L{RWLock} + @param init_sleep_time: sleep time before doing any action + @type init_sleep_time: C{float} + @param sleep_time: sleep time while in critical section + @type sleep_time: C{float} + @param to_write: data that will be appended to the buffer + """ + threading.Thread.__init__(self) + self.__buffer = buffer_ + self.__rw_lock = rw_lock + self.__init_sleep_time = init_sleep_time + self.__sleep_time = sleep_time + self.__to_write = to_write + self.entry_time = None + """Time of entry to the critical section""" + self.exit_time = None + """Time of exit from the critical section""" + + def run(self): + time.sleep(self.__init_sleep_time) + self.__rw_lock.writer_acquire() + self.entry_time = time.time() + time.sleep(self.__sleep_time) + self.__buffer.append(self.__to_write) + self.exit_time = time.time() + self.__rw_lock.writer_release() + + +class Reader(threading.Thread): + def __init__(self, buffer_, rw_lock, init_sleep_time, sleep_time): + """ + @param buffer_: common buffer shared by the readers and writers + @type buffer_: list + @type rw_lock: L{RWLock} + @param init_sleep_time: sleep time before doing any action + @type init_sleep_time: C{float} + @param sleep_time: sleep time while in critical section + @type sleep_time: C{float} + """ + threading.Thread.__init__(self) + self.__buffer = buffer_ + self.__rw_lock = rw_lock + self.__init_sleep_time = init_sleep_time + self.__sleep_time = sleep_time + self.buffer_read = None + """a copy of a the buffer read while in critical section""" + self.entry_time = None + """Time of entry to the critical section""" + self.exit_time = None + """Time of exit from the critical section""" + + def run(self): + 
time.sleep(self.__init_sleep_time) + self.__rw_lock.reader_acquire() + self.entry_time = time.time() + time.sleep(self.__sleep_time) + self.buffer_read = copy.deepcopy(self.__buffer) + self.exit_time = time.time() + self.__rw_lock.reader_release() + + +class RWLockTestCase(unittest.TestCase): + def test_readers_nonexclusive_access(self): + (buffer_, rw_lock, threads) = self.__init_variables() + + threads.append(Reader(buffer_, rw_lock, 0, 0)) + threads.append(Writer(buffer_, rw_lock, 0.2, 0.4, 1)) + threads.append(Reader(buffer_, rw_lock, 0.3, 0.3)) + threads.append(Reader(buffer_, rw_lock, 0.5, 0)) + + self.__start_and_join_threads(threads) + + ## The third reader should enter after the second one but it should + ## exit before the second one exits + ## (i.e. the readers should be in the critical section + ## at the same time) + + self.assertEqual([], threads[0].buffer_read) + self.assertEqual([1], threads[2].buffer_read) + self.assertEqual([1], threads[3].buffer_read) + self.assertTrue(threads[1].exit_time <= threads[2].entry_time) + self.assertTrue(threads[2].entry_time <= threads[3].entry_time) + self.assertTrue(threads[3].exit_time < threads[2].exit_time) + + def test_writers_exclusive_access(self): + (buffer_, rw_lock, threads) = self.__init_variables() + + threads.append(Writer(buffer_, rw_lock, 0, 0.4, 1)) + threads.append(Writer(buffer_, rw_lock, 0.1, 0, 2)) + threads.append(Reader(buffer_, rw_lock, 0.2, 0)) + + self.__start_and_join_threads(threads) + + ## The second writer should wait for the first one to exit + + self.assertEqual([1, 2], threads[2].buffer_read) + self.assertTrue(threads[0].exit_time <= threads[1].entry_time) + self.assertTrue(threads[1].exit_time <= threads[2].exit_time) + + def test_writer_priority(self): + (buffer_, rw_lock, threads) = self.__init_variables() + + threads.append(Writer(buffer_, rw_lock, 0, 0, 1)) + threads.append(Reader(buffer_, rw_lock, 0.1, 0.4)) + threads.append(Writer(buffer_, rw_lock, 0.2, 0, 2)) + threads.append(Reader(buffer_, rw_lock, 0.3, 0)) + threads.append(Reader(buffer_, rw_lock, 0.3, 0)) + + self.__start_and_join_threads(threads) + + ## The second writer should go before the second and the third reader + + self.assertEqual([1], threads[1].buffer_read) + self.assertEqual([1, 2], threads[3].buffer_read) + self.assertEqual([1, 2], threads[4].buffer_read) + self.assertTrue(threads[0].exit_time < threads[1].entry_time) + self.assertTrue(threads[1].exit_time <= threads[2].entry_time) + self.assertTrue(threads[2].exit_time <= threads[3].entry_time) + self.assertTrue(threads[2].exit_time <= threads[4].entry_time) + + def test_many_writers_priority(self): + (buffer_, rw_lock, threads) = self.__init_variables() + + threads.append(Writer(buffer_, rw_lock, 0, 0, 1)) + threads.append(Reader(buffer_, rw_lock, 0.1, 0.6)) + threads.append(Writer(buffer_, rw_lock, 0.2, 0.1, 2)) + threads.append(Reader(buffer_, rw_lock, 0.3, 0)) + threads.append(Reader(buffer_, rw_lock, 0.4, 0)) + threads.append(Writer(buffer_, rw_lock, 0.5, 0.1, 3)) + + self.__start_and_join_threads(threads) + + ## The two last writers should go first -- after the first reader and + ## before the second and the third reader + + self.assertEqual([1], threads[1].buffer_read) + self.assertEqual([1, 2, 3], threads[3].buffer_read) + self.assertEqual([1, 2, 3], threads[4].buffer_read) + self.assertTrue(threads[0].exit_time < threads[1].entry_time) + self.assertTrue(threads[1].exit_time <= threads[2].entry_time) + self.assertTrue(threads[1].exit_time <= threads[5].entry_time) + 
self.assertTrue(threads[2].exit_time <= threads[3].entry_time) + self.assertTrue(threads[2].exit_time <= threads[4].entry_time) + self.assertTrue(threads[5].exit_time <= threads[3].entry_time) + self.assertTrue(threads[5].exit_time <= threads[4].entry_time) + + @staticmethod + def __init_variables(): + buffer_ = [] + rw_lock = RWLock() + threads = [] + return (buffer_, rw_lock, threads) + + @staticmethod + def __start_and_join_threads(threads): + for t in threads: + t.start() + for t in threads: + t.join() diff --git a/venv/lib/python3.10/site-packages/ecdsa/util.py b/venv/lib/python3.10/site-packages/ecdsa/util.py new file mode 100644 index 0000000..818240b --- /dev/null +++ b/venv/lib/python3.10/site-packages/ecdsa/util.py @@ -0,0 +1,435 @@ +from __future__ import division + +import os +import math +import binascii +import sys +from hashlib import sha256 +from six import PY2, int2byte, b, next +from . import der +from ._compat import normalise_bytes + +# RFC5480: +# The "unrestricted" algorithm identifier is: +# id-ecPublicKey OBJECT IDENTIFIER ::= { +# iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 } + +oid_ecPublicKey = (1, 2, 840, 10045, 2, 1) +encoded_oid_ecPublicKey = der.encode_oid(*oid_ecPublicKey) + +# RFC5480: +# The ECDH algorithm uses the following object identifier: +# id-ecDH OBJECT IDENTIFIER ::= { +# iso(1) identified-organization(3) certicom(132) schemes(1) +# ecdh(12) } + +oid_ecDH = (1, 3, 132, 1, 12) + +# RFC5480: +# The ECMQV algorithm uses the following object identifier: +# id-ecMQV OBJECT IDENTIFIER ::= { +# iso(1) identified-organization(3) certicom(132) schemes(1) +# ecmqv(13) } + +oid_ecMQV = (1, 3, 132, 1, 13) + +if sys.version_info >= (3,): # pragma: no branch + + def entropy_to_bits(ent_256): + """Convert a bytestring to string of 0's and 1's""" + return bin(int.from_bytes(ent_256, "big"))[2:].zfill(len(ent_256) * 8) + + +else: + + def entropy_to_bits(ent_256): + """Convert a bytestring to string of 0's and 1's""" + return "".join(bin(ord(x))[2:].zfill(8) for x in ent_256) + + +if sys.version_info < (2, 7): # pragma: no branch + # Can't add a method to a built-in type so we are stuck with this + def bit_length(x): + return len(bin(x)) - 2 + + +else: + + def bit_length(x): + return x.bit_length() or 1 + + +def orderlen(order): + return (1 + len("%x" % order)) // 2 # bytes + + +def randrange(order, entropy=None): + """Return a random integer k such that 1 <= k < order, uniformly + distributed across that range. Worst case should be a mean of 2 loops at + (2**k)+2. + + Note that this function is not declared to be forwards-compatible: we may + change the behavior in future releases. The entropy= argument (which + should get a callable that behaves like os.urandom) can be used to + achieve stability within a given release (for repeatable unit tests), but + should not be used as a long-term-compatible key generation algorithm. + """ + assert order > 1 + if entropy is None: + entropy = os.urandom + upper_2 = bit_length(order - 2) + upper_256 = upper_2 // 8 + 1 + while True: # I don't think this needs a counter with bit-wise randrange + ent_256 = entropy(upper_256) + ent_2 = entropy_to_bits(ent_256) + rand_num = int(ent_2[:upper_2], base=2) + 1 + if 0 < rand_num < order: + return rand_num + + +class PRNG: + # this returns a callable which, when invoked with an integer N, will + # return N pseudorandom bytes. 
Note: this is a short-term PRNG, meant + # primarily for the needs of randrange_from_seed__trytryagain(), which + # only needs to run it a few times per seed. It does not provide + # protection against state compromise (forward security). + def __init__(self, seed): + self.generator = self.block_generator(seed) + + def __call__(self, numbytes): + a = [next(self.generator) for i in range(numbytes)] + + if PY2: # pragma: no branch + return "".join(a) + else: + return bytes(a) + + def block_generator(self, seed): + counter = 0 + while True: + for byte in sha256( + ("prng-%d-%s" % (counter, seed)).encode() + ).digest(): + yield byte + counter += 1 + + +def randrange_from_seed__overshoot_modulo(seed, order): + # hash the data, then turn the digest into a number in [1,order). + # + # We use David-Sarah Hopwood's suggestion: turn it into a number that's + # sufficiently larger than the group order, then modulo it down to fit. + # This should give adequate (but not perfect) uniformity, and simple + # code. There are other choices: try-try-again is the main one. + base = PRNG(seed)(2 * orderlen(order)) + number = (int(binascii.hexlify(base), 16) % (order - 1)) + 1 + assert 1 <= number < order, (1, number, order) + return number + + +def lsb_of_ones(numbits): + return (1 << numbits) - 1 + + +def bits_and_bytes(order): + bits = int(math.log(order - 1, 2) + 1) + bytes = bits // 8 + extrabits = bits % 8 + return bits, bytes, extrabits + + +# the following randrange_from_seed__METHOD() functions take an +# arbitrarily-sized secret seed and turn it into a number that obeys the same +# range limits as randrange() above. They are meant for deriving consistent +# signing keys from a secret rather than generating them randomly, for +# example a protocol in which three signing keys are derived from a master +# secret. You should use a uniformly-distributed unguessable seed with about +# curve.baselen bytes of entropy. To use one, do this: +# seed = os.urandom(curve.baselen) # or other starting point +# secexp = ecdsa.util.randrange_from_seed__trytryagain(seed, curve.order) +# sk = SigningKey.from_secret_exponent(secexp, curve) + + +def randrange_from_seed__truncate_bytes(seed, order, hashmod=sha256): + # hash the seed, then turn the digest into a number in [1,order), but + # don't worry about trying to uniformly fill the range. This will lose, + # on average, four bits of entropy. + bits, _bytes, extrabits = bits_and_bytes(order) + if extrabits: + _bytes += 1 + base = hashmod(seed).digest()[:_bytes] + base = "\x00" * (_bytes - len(base)) + base + number = 1 + int(binascii.hexlify(base), 16) + assert 1 <= number < order + return number + + +def randrange_from_seed__truncate_bits(seed, order, hashmod=sha256): + # like randrange_from_seed__truncate_bytes, but only lose an average of + # half a bit + bits = int(math.log(order - 1, 2) + 1) + maxbytes = (bits + 7) // 8 + base = hashmod(seed).digest()[:maxbytes] + base = "\x00" * (maxbytes - len(base)) + base + topbits = 8 * maxbytes - bits + if topbits: + base = int2byte(ord(base[0]) & lsb_of_ones(topbits)) + base[1:] + number = 1 + int(binascii.hexlify(base), 16) + assert 1 <= number < order + return number + + +def randrange_from_seed__trytryagain(seed, order): + # figure out exactly how many bits we need (rounded up to the nearest + # bit), so we can reduce the chance of looping to less than 0.5 . This is + # specified to feed from a byte-oriented PRNG, and discards the + # high-order bits of the first byte as necessary to get the right number + # of bits.
The average number of loops will range from 1.0 (when + # order=2**k-1) to 2.0 (when order=2**k+1). + assert order > 1 + bits, bytes, extrabits = bits_and_bytes(order) + generate = PRNG(seed) + while True: + extrabyte = b("") + if extrabits: + extrabyte = int2byte(ord(generate(1)) & lsb_of_ones(extrabits)) + guess = string_to_number(extrabyte + generate(bytes)) + 1 + if 1 <= guess < order: + return guess + + +def number_to_string(num, order): + l = orderlen(order) + fmt_str = "%0" + str(2 * l) + "x" + string = binascii.unhexlify((fmt_str % num).encode()) + assert len(string) == l, (len(string), l) + return string + + +def number_to_string_crop(num, order): + l = orderlen(order) + fmt_str = "%0" + str(2 * l) + "x" + string = binascii.unhexlify((fmt_str % num).encode()) + return string[:l] + + +def string_to_number(string): + return int(binascii.hexlify(string), 16) + + +def string_to_number_fixedlen(string, order): + l = orderlen(order) + assert len(string) == l, (len(string), l) + return int(binascii.hexlify(string), 16) + + +# these methods are useful for the sigencode= argument to SK.sign() and the +# sigdecode= argument to VK.verify(), and control how the signature is packed +# or unpacked. + + +def sigencode_strings(r, s, order): + r_str = number_to_string(r, order) + s_str = number_to_string(s, order) + return (r_str, s_str) + + +def sigencode_string(r, s, order): + """ + Encode the signature to raw format (:term:`raw encoding`) + + It's expected that this function will be used as a `sigencode=` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. + + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: raw encoding of ECDSA signature + :rtype: bytes + """ + # for any given curve, the size of the signature numbers is + # fixed, so just use simple concatenation + r_str, s_str = sigencode_strings(r, s, order) + return r_str + s_str + + +def sigencode_der(r, s, order): + """ + Encode the signature into the ECDSA-Sig-Value structure using :term:`DER`. + + Encodes the signature to the following :term:`ASN.1` structure:: + + Ecdsa-Sig-Value ::= SEQUENCE { + r INTEGER, + s INTEGER + } + + It's expected that this function will be used as a `sigencode=` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. + + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: DER encoding of ECDSA signature + :rtype: bytes + """ + return der.encode_sequence(der.encode_integer(r), der.encode_integer(s)) + + +# canonical versions of sigencode methods +# these enforce low S values, by negating the value (modulo the order) if +# above order/2 see CECKey::Sign() +# https://github.com/bitcoin/bitcoin/blob/master/src/key.cpp#L214 +def sigencode_strings_canonize(r, s, order): + if s > order / 2: + s = order - s + return sigencode_strings(r, s, order) + + +def sigencode_string_canonize(r, s, order): + if s > order / 2: + s = order - s + return sigencode_string(r, s, order) + + +def sigencode_der_canonize(r, s, order): + if s > order / 2: + s = order - s + return sigencode_der(r, s, order) + + +class MalformedSignature(Exception): + """ + Raised by decoding functions when the signature is malformed. 
+ + Malformed in this context means that the relevant strings or integers + do not match what a signature over provided curve would create. Either + because the byte strings have incorrect lengths or because the encoded + values are too large. + """ + + pass + + +def sigdecode_string(signature, order): + """ + Decoder for :term:`raw encoding` of ECDSA signatures. + + raw encoding is a simple concatenation of the two integers that comprise + the signature, with each encoded using the same amount of bytes depending + on curve size/order. + + It's expected that this function will be used as the `sigdecode=` + parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. + + :param signature: encoded signature + :type signature: bytes like object + :param order: order of the curve over which the signature was computed + :type order: int + + :raises MalformedSignature: when the encoding of the signature is invalid + + :return: tuple with decoded 'r' and 's' values of signature + :rtype: tuple of ints + """ + signature = normalise_bytes(signature) + l = orderlen(order) + if not len(signature) == 2 * l: + raise MalformedSignature( + "Invalid length of signature, expected {0} bytes long, " + "provided string is {1} bytes long".format(2 * l, len(signature)) + ) + r = string_to_number_fixedlen(signature[:l], order) + s = string_to_number_fixedlen(signature[l:], order) + return r, s + + +def sigdecode_strings(rs_strings, order): + """ + Decode the signature from two strings. + + First string needs to be a big endian encoding of 'r', second needs to + be a big endian encoding of the 's' parameter of an ECDSA signature. + + It's expected that this function will be used as the `sigdecode=` + parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. + + :param list rs_strings: list of two bytes-like objects, each encoding one + parameter of signature + :param int order: order of the curve over which the signature was computed + + :raises MalformedSignature: when the encoding of the signature is invalid + + :return: tuple with decoded 'r' and 's' values of signature + :rtype: tuple of ints + """ + if not len(rs_strings) == 2: + raise MalformedSignature( + "Invalid number of strings provided: {0}, expected 2".format( + len(rs_strings) + ) + ) + (r_str, s_str) = rs_strings + r_str = normalise_bytes(r_str) + s_str = normalise_bytes(s_str) + l = orderlen(order) + if not len(r_str) == l: + raise MalformedSignature( + "Invalid length of first string ('r' parameter), " + "expected {0} bytes long, provided string is {1} " + "bytes long".format(l, len(r_str)) + ) + if not len(s_str) == l: + raise MalformedSignature( + "Invalid length of second string ('s' parameter), " + "expected {0} bytes long, provided string is {1} " + "bytes long".format(l, len(s_str)) + ) + r = string_to_number_fixedlen(r_str, order) + s = string_to_number_fixedlen(s_str, order) + return r, s + + +def sigdecode_der(sig_der, order): + """ + Decoder for DER format of ECDSA signatures. + + DER format of signature is one that uses the :term:`ASN.1` :term:`DER` + rules to encode it as a sequence of two integers:: + + Ecdsa-Sig-Value ::= SEQUENCE { + r INTEGER, + s INTEGER + } + + It's expected that this function will be used as the `sigdecode=` + parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method.
+ + :param sig_der: encoded signature + :type sig_der: bytes like object + :param order: order of the curve over which the signature was computed + :type order: int + + :raises UnexpectedDER: when the encoding of signature is invalid + + :return: tuple with decoded 'r' and 's' values of signature + :rtype: tuple of ints + """ + sig_der = normalise_bytes(sig_der) + # return der.encode_sequence(der.encode_integer(r), der.encode_integer(s)) + rs_strings, empty = der.remove_sequence(sig_der) + if empty != b"": + raise der.UnexpectedDER( + "trailing junk after DER sig: %s" % binascii.hexlify(empty) + ) + r, rest = der.remove_integer(rs_strings) + s, empty = der.remove_integer(rest) + if empty != b"": + raise der.UnexpectedDER( + "trailing junk after DER numbers: %s" % binascii.hexlify(empty) + ) + return r, s diff --git a/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/LICENSE b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/LICENSE new file mode 100644 index 0000000..0e259d4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/LICENSE @@ -0,0 +1,121 @@ +Creative Commons Legal Code + +CC0 1.0 Universal + + CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE + LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN + ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS + INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES + REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS + PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM + THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED + HEREUNDER. + +Statement of Purpose + +The laws of most jurisdictions throughout the world automatically confer +exclusive Copyright and Related Rights (defined below) upon the creator +and subsequent owner(s) (each and all, an "owner") of an original work of +authorship and/or a database (each, a "Work"). + +Certain owners wish to permanently relinquish those rights to a Work for +the purpose of contributing to a commons of creative, cultural and +scientific works ("Commons") that the public can reliably and without fear +of later claims of infringement build upon, modify, incorporate in other +works, reuse and redistribute as freely as possible in any form whatsoever +and for any purposes, including without limitation commercial purposes. +These owners may contribute to the Commons to promote the ideal of a free +culture and the further production of creative, cultural and scientific +works, or to gain reputation or greater distribution for their Work in +part through the use and efforts of others. + +For these and/or other purposes and motivations, and without any +expectation of additional consideration or compensation, the person +associating CC0 with a Work (the "Affirmer"), to the extent that he or she +is an owner of Copyright and Related Rights in the Work, voluntarily +elects to apply CC0 to the Work and publicly distribute the Work under its +terms, with knowledge of his or her Copyright and Related Rights in the +Work and the meaning and intended legal effect of CC0 on those rights. + +1. Copyright and Related Rights. 
A Work made available under CC0 may be +protected by copyright and related or neighboring rights ("Copyright and +Related Rights"). Copyright and Related Rights include, but are not +limited to, the following: + + i. the right to reproduce, adapt, distribute, perform, display, + communicate, and translate a Work; + ii. moral rights retained by the original author(s) and/or performer(s); +iii. publicity and privacy rights pertaining to a person's image or + likeness depicted in a Work; + iv. rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(a), below; + v. rights protecting the extraction, dissemination, use and reuse of data + in a Work; + vi. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation + thereof, including any amended or successor version of such + directive); and +vii. other similar, equivalent or corresponding rights throughout the + world based on applicable law or treaty, and any national + implementations thereof. + +2. Waiver. To the greatest extent permitted by, but not in contravention +of, applicable law, Affirmer hereby overtly, fully, permanently, +irrevocably and unconditionally waives, abandons, and surrenders all of +Affirmer's Copyright and Related Rights and associated claims and causes +of action, whether now known or unknown (including existing as well as +future claims and causes of action), in the Work (i) in all territories +worldwide, (ii) for the maximum duration provided by applicable law or +treaty (including future time extensions), (iii) in any current or future +medium and for any number of copies, and (iv) for any purpose whatsoever, +including without limitation commercial, advertising or promotional +purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each +member of the public at large and to the detriment of Affirmer's heirs and +successors, fully intending that such Waiver shall not be subject to +revocation, rescission, cancellation, termination, or any other legal or +equitable action to disrupt the quiet enjoyment of the Work by the public +as contemplated by Affirmer's express Statement of Purpose. + +3. Public License Fallback. Should any part of the Waiver for any reason +be judged legally invalid or ineffective under applicable law, then the +Waiver shall be preserved to the maximum extent permitted taking into +account Affirmer's express Statement of Purpose. In addition, to the +extent the Waiver is so judged Affirmer hereby grants to each affected +person a royalty-free, non transferable, non sublicensable, non exclusive, +irrevocable and unconditional license to exercise Affirmer's Copyright and +Related Rights in the Work (i) in all territories worldwide, (ii) for the +maximum duration provided by applicable law or treaty (including future +time extensions), (iii) in any current or future medium and for any number +of copies, and (iv) for any purpose whatsoever, including without +limitation commercial, advertising or promotional purposes (the +"License"). The License shall be deemed effective as of the date CC0 was +applied by Affirmer to the Work. 
Should any part of the License for any +reason be judged legally invalid or ineffective under applicable law, such +partial invalidity or ineffectiveness shall not invalidate the remainder +of the License, and in such case Affirmer hereby affirms that he or she +will not (i) exercise any of his or her remaining Copyright and Related +Rights in the Work or (ii) assert any associated claims and causes of +action with respect to the Work, in either case contrary to Affirmer's +express Statement of Purpose. + +4. Limitations and Disclaimers. + + a. No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + b. Affirmer offers the Work as-is and makes no representations or + warranties of any kind concerning the Work, express, implied, + statutory or otherwise, including without limitation warranties of + title, merchantability, fitness for a particular purpose, non + infringement, or the absence of latent or other defects, accuracy, or + the present or absence of errors, whether or not discoverable, all to + the greatest extent permissible under applicable law. + c. Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without + limitation any person's Copyright and Related Rights in the Work. + Further, Affirmer disclaims responsibility for obtaining any necessary + consents, permissions or other rights required for any use of the + Work. + d. Affirmer understands and acknowledges that Creative Commons is not a + party to this document and has no duty or obligation with respect to + this CC0 or use of the Work. diff --git a/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/METADATA new file mode 100644 index 0000000..040e696 --- /dev/null +++ b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/METADATA @@ -0,0 +1,478 @@ +Metadata-Version: 2.1 +Name: email-validator +Version: 1.2.1 +Summary: A robust email syntax and deliverability validation library. +Home-page: https://github.com/JoshData/python-email-validator +Author: Joshua Tauberer +Author-email: jt@occams.info +License: CC0 (copyright waived) +Keywords: email address validator +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: dnspython (>=1.15.0) +Requires-Dist: idna (>=2.0.0) + +email-validator: Validate Email Addresses +========================================= + +A robust email address syntax and deliverability validation library for +Python by [Joshua Tauberer](https://joshdata.me). + +This library validates that a string is of the form `name@example.com`. This is +the sort of validation you would want for an email-based login form on +a website. 
+ +Key features: + +* Checks that an email address has the correct syntax --- good for + login forms or other uses related to identifying users. +* Gives friendly error messages when validation fails (appropriate to show + to end users). +* (optionally) Checks deliverability: Does the domain name resolve? And you can override the default DNS resolver. +* Supports internationalized domain names and (optionally) + internationalized local parts, but blocks unsafe characters. +* Normalizes email addresses (super important for internationalized + addresses! see below). + +The library is NOT for validation of the To: line in an email message +(e.g. `My Name <my@address.com>`), which +[flanker](https://github.com/mailgun/flanker) is more appropriate for. +And this library does NOT permit obsolete forms of email addresses, so +if you need strict validation against the email specs exactly, use +[pyIsEmail](https://github.com/michaelherold/pyIsEmail). + +This library is tested with Python 3.6+ but should work in earlier versions: + +[![Build Status](https://app.travis-ci.com/JoshData/python-email-validator.svg?branch=main)](https://app.travis-ci.com/JoshData/python-email-validator) + +--- + +This library was first published in 2015. The current version is 1.2.1 +(posted May 1, 2022). The main changes in version 1.2 are: + +* Rejecting domains with NULL MX records (when deliverability checks + are turned on). +* Rejecting unsafe unicode characters. (Some of these checks you should + be doing on all of your user inputs already!) +* Rejecting most special-use reserved domain names. A new `test_environment` + option is added for using `@*.test` domains. +* Some fixes in the tests. + +--- + +Installation +------------ + +This package [is on PyPI](https://pypi.org/project/email-validator/), so: + +```sh +pip install email-validator +``` + +`pip3` also works. + +Quick Start +----------- + +If you're validating a user's email address before creating a user +account in your application, you might do this: + +```python +from email_validator import validate_email, EmailNotValidError + +email = "my+address@mydomain.tld" + +try: + # Validate & take the normalized form of the email + # address for all logic beyond this point (especially + # before going to a database query where equality + # does not take into account normalization). + email = validate_email(email).email +except EmailNotValidError as e: + # email is not valid, exception message is human-readable + print(str(e)) +``` + +This validates the address and gives you its normalized form. You should +**put the normalized form in your database** and always normalize before +checking if an address is in your database. + +The validator will accept internationalized email addresses, but not all +mail systems can send email to an address with non-English characters in +the *local* part of the address (before the @-sign). See the `allow_smtputf8` +option below. + +Usage +----- + +### Overview + +The module provides a function `validate_email(email_address)` which +takes an email address (either a `str` or `bytes`, but only non-internationalized +addresses are allowed when passing a `bytes`) and: + +- Raises an `EmailNotValidError` with a helpful, human-readable error + message explaining why the email address is not valid, or +- Returns an object with a normalized form of the email address (which + you should use!) and other information about it.
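As a minimal sketch of those two outcomes (the address is illustrative; `check_deliverability=False` skips the DNS lookup so the snippet runs offline):

```python
from email_validator import validate_email, EmailNotValidError

try:
    valid = validate_email("user@example.org", check_deliverability=False)  # illustrative address
    print(valid.email)     # the normalized form you should store
    print(valid.smtputf8)  # False for an all-ASCII address
except EmailNotValidError as e:
    print(str(e))  # human-readable explanation of the failure
```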
+ +When an email address is not valid, `validate_email` raises either an +`EmailSyntaxError` if the form of the address is invalid or an +`EmailUndeliverableError` if the domain name fails the DNS check. Both +exception classes are subclasses of `EmailNotValidError`, which in turn +is a subclass of `ValueError`. + +But when an email address is valid, an object is returned containing +a normalized form of the email address (which you should use!) and +other information. + +The validator doesn't permit obsolete forms of email addresses that no +one uses anymore even though they are still valid and deliverable, since +they will probably give you grief if you're using email for login. (See +later in the document about that.) + +The validator checks that the domain name in the email address has an +MX DNS record indicating that it is configured for email (except that a NULL +MX record or a `v=spf1 -all` SPF record will cause the address to be rejected). +There is nothing to be gained by trying to actually contact an SMTP +server, so that's not done here. For privacy, security, and practicality +reasons, servers are good at not giving away whether an address is +deliverable or not: email addresses that appear to accept mail at first +can bounce mail after a delay, and bounced mail may indicate a temporary +failure of a good email address (sometimes an intentional failure, like +greylisting). (A/AAAA-record fallback is also checked.) + +### Options + +The `validate_email` function also accepts the following keyword arguments +(defaults are as shown below): + +`allow_smtputf8=True`: Set to `False` to prohibit internationalized addresses that would + require the + [SMTPUTF8](https://tools.ietf.org/html/rfc6531) extension. You can also set `email_validator.ALLOW_SMTPUTF8` to `False` to turn it off for all calls by default. + +`check_deliverability=True`: Set to `False` to skip the domain name MX DNS record check. You can also set `email_validator.CHECK_DELIVERABILITY` to `False` to turn it off for all calls by default. + +`allow_empty_local=False`: Set to `True` to allow an empty local part (i.e. + `@example.com`), e.g. for validating Postfix aliases. + +`dns_resolver=None`: Pass an instance of [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to control the DNS resolver including setting a timeout and [a cache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html). The `caching_resolver` function shown below is a helper function to construct a dns.resolver.Resolver with a [LRUCache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html#dns.resolver.LRUCache). Reuse the same resolver instance across calls to `validate_email` to make use of the cache. + +`test_environment=False`: DNS-based deliverability checks are disabled and `test` and `subdomain.test` domain names are permitted (see below). You can also set `email_validator.TEST_ENVIRONMENT` to `True` to turn it on for all calls by default.
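Putting several of these options together, a call overriding the defaults might look like this (a sketch only; the address and option values are purely illustrative):

```python
from email_validator import validate_email

valid = validate_email(
    "login@mydomain.tld",        # illustrative address
    allow_smtputf8=False,        # reject local parts that would need SMTPUTF8
    check_deliverability=False,  # skip the DNS MX record check
    allow_empty_local=False,     # the default, shown here for completeness
)
print(valid.ascii_email)  # ASCII-only form; available whenever allow_smtputf8=False succeeds
```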
+ +### DNS timeout and cache + +When validating many email addresses or to control the timeout (the default is 15 seconds), create a caching [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to reuse in each call. The `caching_resolver` function returns one easily for you: + +```python +from email_validator import validate_email, caching_resolver + +resolver = caching_resolver(timeout=10) + +while True: + email = validate_email(email, dns_resolver=resolver).email +``` + +### Test addresses + +This library rejects email addresses that use the [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) `invalid`, `localhost`, `test`, and some others by raising `EmailUndeliverableError`. This is to protect your system from abuse: You probably don't want a user to be able to cause an email to be sent to `localhost`. However, in your non-production test environments you may want to use `@test` or `@myname.test` email addresses. There are three ways you can allow this: + +1. Add `test_environment=True` to the call to `validate_email` (see above). +2. Set `email_validator.TEST_ENVIRONMENT` to `True`. +3. Remove the special-use domain name that you want to use from `email_validator.SPECIAL_USE_DOMAIN_NAMES`: + +```python +import email_validator +email_validator.SPECIAL_USE_DOMAIN_NAMES.remove("test") +``` + +It is tempting to use `@example.com/net/org` in tests. These domains are reserved to IANA for use in documentation so there is no risk of accidentally emailing someone at those domains. But beware that this library will reject these domain names if DNS-based deliverability checks are not disabled because these domains do not resolve to domains that accept email. In tests, consider using your own domain name or `@test` or `@myname.test` instead. + +Internationalized email addresses +--------------------------------- + +The email protocol SMTP and the domain name system DNS have historically +only allowed English (ASCII) characters in email addresses and domain names, +respectively. Each has adapted to internationalization in a separate +way, creating two separate aspects to email address +internationalization. + +### Internationalized domain names (IDN) + +The first is [internationalized domain names (RFC +5891)](https://tools.ietf.org/html/rfc5891), a.k.a. IDNA 2008. The DNS +system has not been updated with Unicode support. Instead, internationalized +domain names are converted into a special IDNA ASCII "[Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)" +form starting with `xn--`. When an email address has non-ASCII +characters in its domain part, the domain part is replaced with its IDNA +ASCII equivalent form in the process of mail transmission. Your mail +submission library probably does this for you transparently. Note that +most web browsers are currently in transition between IDNA 2003 (RFC +3490) and IDNA 2008 (RFC 5891) and [compliance around the web is not +very +good](http://archives.miloush.net/michkap/archive/2012/02/27/10273315.html) +in any case, so be aware that edge cases are handled differently by +different applications and libraries. This library conforms to IDNA 2008 +using the [idna](https://github.com/kjd/idna) module by Kim Davies. + +### Internationalized local parts + +The second sort of internationalization is internationalization in the +*local* part of the address (before the @-sign). In non-internationalized +email addresses, only English letters, numbers, and some punctuation +(`._!#$%&'^``*+-=~/?{|}`) are allowed. In internationalized email address +local parts, a wider range of Unicode characters are allowed.
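As a quick sketch of the difference (the Examples section below shows the full returned objects; `check_deliverability=False` keeps the snippet offline, and both addresses are illustrative):

```python
from email_validator import validate_email

# ASCII-only local part: deliverable without any SMTP extension
print(validate_email("me@example.org", check_deliverability=False).smtputf8)    # False

# internationalized local part: delivery would require SMTPUTF8
print(validate_email("ツ-me@example.org", check_deliverability=False).smtputf8)  # True
```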
+ +A surprisingly large number of Unicode characters are not safe to display, +especially when the email address is concatenated with other text, so this +library tries to protect you by not permitting reserved, noncharacter, private use, +formatting (which can be used to alter the display order of characters), +whitespace, and control characters, and combining characters +as the first character (so that they cannot combine with something outside +of the email address string). See https://qntm.org/safe and https://trojansource.codes/ +for relevant prior work. (Other than whitespace, these are checks that +you should be applying to nearly all user inputs in a security-sensitive +context.) + +These character checks are performed after Unicode normalization (see below), +so you are only fully protected if you replace all user-provided email addresses +with the normalized email address string returned by this library. This does not +guard against the well known problem that many Unicode characters look alike +(or are identical), which can be used to fool humans reading displayed text. + +Email addresses with these non-ASCII characters require that your mail +submission library and the mail servers along the route to the destination, +including your own outbound mail server, all support the +[SMTPUTF8 (RFC 6531)](https://tools.ietf.org/html/rfc6531) extension. +Support for SMTPUTF8 varies. See the `allow_smtputf8` parameter. + +### If you know ahead of time that SMTPUTF8 is not supported by your mail submission stack + +By default all internationalized forms are accepted by the validator. +But if you know ahead of time that SMTPUTF8 is not supported by your +mail submission stack, then you must filter out addresses that require +SMTPUTF8 using the `allow_smtputf8=False` keyword argument (see above). +This will cause the validation function to raise an `EmailSyntaxError` if +delivery would require SMTPUTF8. That's just in those cases where +non-ASCII characters appear before the @-sign. If you do not set +`allow_smtputf8=False`, you can also check the value of the `smtputf8` +field in the returned object. + +If your mail submission library doesn't support Unicode at all --- even +in the domain part of the address --- then immediately prior to mail +submission you must replace the email address with its ASCII-ized form. +This library gives you back the ASCII-ized form in the `ascii_email` +field in the returned object, which you can get like this: + +```python +valid = validate_email(email, allow_smtputf8=False) +email = valid.ascii_email +``` + +The local part is left alone (if it has internationalized characters +`allow_smtputf8=False` will force validation to fail) and the domain +part is converted to [IDNA ASCII](https://tools.ietf.org/html/rfc5891). +(You probably should not do this at account creation time so you don't +change the user's login information without telling them.) + +### UCS-4 support required for Python 2.7 + +This library hopefully still works with Python 2.7. +Note that when using Python 2.7, it is required that it was built with +UCS-4 support (see +[here](https://stackoverflow.com/questions/29109944/python-returns-length-of-2-for-single-unicode-character-string)); +otherwise emails with unicode characters outside of the BMP (Basic +Multilingual Plane) will not validate correctly. + +Normalization +------------- + +The use of Unicode in email addresses introduced a normalization +problem. Different Unicode strings can look identical and have the same +semantic meaning to the user.
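A tiny illustration of the underlying problem, using Python's standard `unicodedata` module directly rather than this library's API:

```python
import unicodedata

decomposed = "cafe\u0301"  # 'e' followed by a combining acute accent
precomposed = "caf\u00e9"  # a single precomposed 'é' code point
print(decomposed == precomposed)  # False: same appearance, different code points
print(unicodedata.normalize("NFC", decomposed) == precomposed)  # True after NFC normalization
```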
The `email` field returned on successful +validation provides the correctly normalized form of the given email +address: + +```python +valid = validate_email("me@Domain.com") +email = valid.email +print(email) +# prints: me@domain.com +``` + +Because an end-user might type their email address in different (but +equivalent) un-normalized forms at different times, you ought to +replace what they enter with the normalized form immediately prior to +going into your database (during account creation), querying your database +(during login), or sending outbound mail. Normalization may also change +the length of an email address, and this may affect whether it is valid +and acceptable by your SMTP provider. + +The normalizations include lowercasing the domain part of the email +address (domain names are case-insensitive), [Unicode "NFC" +normalization](https://en.wikipedia.org/wiki/Unicode_equivalence) of the +whole address (which turns characters plus [combining +characters](https://en.wikipedia.org/wiki/Combining_character) into +precomposed characters where possible), replacement of [fullwidth and +halfwidth +characters](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) +in the domain part, possibly other +[UTS46](http://unicode.org/reports/tr46) mappings on the domain part, +and conversion from Punycode to Unicode characters. + +(See [RFC 6532 (internationalized email) section +3.1](https://tools.ietf.org/html/rfc6532#section-3.1) and [RFC 5895 +(IDNA 2008) section 2](http://www.ietf.org/rfc/rfc5895.txt).) + +Examples +-------- + +For the email address `test@joshdata.me`, the returned object is: + +```python +ValidatedEmail( + email='test@joshdata.me', + local_part='test', + domain='joshdata.me', + ascii_email='test@joshdata.me', + ascii_local_part='test', + ascii_domain='joshdata.me', + smtputf8=False, + mx=[(10, 'box.occams.info')], + mx_fallback_type=None) +``` + +For the fictitious address `example@ツ.life`, which has an +internationalized domain but ASCII local part, the returned object is: + +```python +ValidatedEmail( + email='example@ツ.life', + local_part='example', + domain='ツ.life', + ascii_email='example@xn--bdk.life', + ascii_local_part='example', + ascii_domain='xn--bdk.life', + smtputf8=False) + +``` + +Note that `smtputf8` is `False` even though the domain part is +internationalized because +[SMTPUTF8](https://tools.ietf.org/html/rfc6531) is only needed if the +local part of the address is internationalized (the domain part can be +converted to IDNA ASCII Punycode). Also note that the `email` and `domain` +fields provide a normalized form of the email address and domain name +(casefolding and Unicode normalization as required by IDNA 2008). + +Calling `validate_email` with the ASCII form of the above email address, +`example@xn--bdk.life`, returns the exact same information (i.e., the +`email` field will always contain Unicode characters, not Punycode). + +For the fictitious address `ツ-test@joshdata.me`, which has an +internationalized local part, the returned object is: + +```python +ValidatedEmail( + email='ツ-test@joshdata.me', + local_part='ツ-test', + domain='joshdata.me', + ascii_email=None, + ascii_local_part=None, + ascii_domain='joshdata.me', + smtputf8=True) +``` + +Now `smtputf8` is `True` and `ascii_email` is `None` because the local +part of the address is internationalized.
The `local_part` and `email` fields +return the normalized form of the address: certain Unicode characters +(such as angstrom and ohm) may be replaced by other equivalent code +points (a-with-ring and omega). + +Return value +------------ + +When an email address passes validation, the fields in the returned object +are: + +| Field | Value | +| -----:|-------| +| `email` | The normalized form of the email address that you should put in your database. This merely combines the `local_part` and `domain` fields (see below). | +| `ascii_email` | If set, an ASCII-only form of the email address by replacing the domain part with [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt). This field will be present when an ASCII-only form of the email address exists (including if the email address is already ASCII). If the local part of the email address contains internationalized characters, `ascii_email` will be `None`. If set, it merely combines `ascii_local_part` and `ascii_domain`. | +| `local_part` | The local part of the given email address (before the @-sign) with Unicode NFC normalization applied. | +| `ascii_local_part` | If set, the local part, which is composed of ASCII characters only. | +| `domain` | The canonical internationalized Unicode form of the domain part of the email address. If the returned string contains non-ASCII characters, either the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit the message or else the email address's domain part must be converted to IDNA ASCII first: Use `ascii_domain` field instead. | +| `ascii_domain` | The [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)-encoded form of the domain part of the given email address, as it would be transmitted on the wire. | +| `smtputf8` | A boolean indicating that the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit messages to this address because the local part of the address has non-ASCII characters (the local part cannot be IDNA-encoded). If `allow_smtputf8=False` is passed as an argument, this flag will always be false because an exception is raised if it would have been true. | +| `mx` | A list of (priority, domain) tuples of MX records specified in the DNS for the domain (see [RFC 5321 section 5](https://tools.ietf.org/html/rfc5321#section-5)). May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. | +| `mx_fallback_type` | `None` if an `MX` record is found. If no MX records are actually specified in DNS and instead are inferred, through an obsolete mechanism, from A or AAAA records, the value is the type of DNS record used instead (`A` or `AAAA`). May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. | + +Assumptions +----------- + +By design, this validator does not pass all email addresses that +strictly conform to the standards. Many email address forms are obsolete +or likely to cause trouble: + +* The validator assumes the email address is intended to be + deliverable on the public Internet. The domain part + of the email address must be a resolvable domain name + (without NULL MX or SPF -all DNS records). 
+ Most [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) + and their subdomains are considered invalid (except see + the `test_environment` parameter above). +* The "quoted string" form of the local part of the email address (RFC + 5321 4.1.2) is not permitted --- no one uses this anymore anyway. + Quoted forms allow multiple @-signs, space characters, and other + troublesome conditions. The unusual [(comment) syntax](https://github.com/JoshData/python-email-validator/issues/77) + in email addresses is also rejected. +* The "literal" form for the domain part of an email address (an + IP address) is not accepted --- no one uses this anymore anyway. + +Testing +------- + +Tests can be run using + +```sh +pip install -r test_requirements.txt +make test +``` + +For Project Maintainers +----------------------- + +The package is distributed as a universal wheel and as a source package. + +To release: + +* Update the version number. +* Follow the steps below to publish source and a universal wheel to pypi. +* Make a release at https://github.com/JoshData/python-email-validator/releases/new. + +```sh +pip3 install twine +rm -rf dist +python3 setup.py sdist +python3 setup.py bdist_wheel +twine upload dist/* +git tag v1.0.XXX # replace with version in setup.cfg +git push --tags +``` + +Notes: The wheel is specified as universal in the file `setup.cfg` by the `universal = 1` key in the +`[bdist_wheel]` section. + + diff --git a/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/RECORD new file mode 100644 index 0000000..ca50939 --- /dev/null +++ b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/RECORD @@ -0,0 +1,11 @@ +../../../bin/email_validator,sha256=hVrcTVmeSsY47RieTpQU4wVRs-alLNHPL140JVuXQQc,254 +email_validator-1.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +email_validator-1.2.1.dist-info/LICENSE,sha256=ogEPNDSH0_dhiv_lT3ifVIdgIzHAqNA_SemnxUfPBJk,7048 +email_validator-1.2.1.dist-info/METADATA,sha256=A-1gcB8Zf4b-E3jAjSxmOS5zjkzr_FL-gbBqQZCxzko,23000 +email_validator-1.2.1.dist-info/RECORD,, +email_validator-1.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +email_validator-1.2.1.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +email_validator-1.2.1.dist-info/entry_points.txt,sha256=wNs-98c7HUZ5a3_v6wkTha-8tSFn_QF1wzkLjRfaaMI,58 +email_validator-1.2.1.dist-info/top_level.txt,sha256=fYDOSWFZke46ut7WqdOAJjjhlpPYAaOwOwIsh3s8oWI,16 +email_validator/__init__.py,sha256=MfXerpocKGTVKSvNZ8NXcjWBztn-5N3eQnzNflgNi-E,32665 +email_validator/__pycache__/__init__.cpython-310.pyc,, diff --git a/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/REQUESTED b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/WHEEL new file mode 100644 index 0000000..0b18a28 --- /dev/null +++ b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/entry_points.txt
b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/entry_points.txt new file mode 100644 index 0000000..5eeda3a --- /dev/null +++ b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +email_validator = email_validator:main + diff --git a/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/top_level.txt new file mode 100644 index 0000000..798fd5e --- /dev/null +++ b/venv/lib/python3.10/site-packages/email_validator-1.2.1.dist-info/top_level.txt @@ -0,0 +1 @@ +email_validator diff --git a/venv/lib/python3.10/site-packages/email_validator/__init__.py b/venv/lib/python3.10/site-packages/email_validator/__init__.py new file mode 100644 index 0000000..2223e32 --- /dev/null +++ b/venv/lib/python3.10/site-packages/email_validator/__init__.py @@ -0,0 +1,717 @@ +# -*- coding: utf-8 -*- + +import sys +import re +import unicodedata +import dns.resolver +import dns.exception +import idna # implements IDNA 2008; Python's codec is only IDNA 2003 + +# Default values for keyword arguments. + +ALLOW_SMTPUTF8 = True +CHECK_DELIVERABILITY = True +TEST_ENVIRONMENT = False +DEFAULT_TIMEOUT = 15 # secs + +# Based on RFC 2822 section 3.2.4 / RFC 5322 section 3.2.3, these +# characters are permitted in email addresses (not taking into +# account internationalization): +ATEXT = r'a-zA-Z0-9_!#\$%&\'\*\+\-/=\?\^`\{\|\}~' + +# A "dot atom text", per RFC 2822 3.2.4: +DOT_ATOM_TEXT = '[' + ATEXT + ']+(?:\\.[' + ATEXT + ']+)*' + +# RFC 6531 section 3.3 extends the allowed characters in internationalized +# addresses to also include three specific ranges of UTF8 defined in +# RFC3629 section 4, which appear to be the Unicode code points from +# U+0080 to U+10FFFF. +ATEXT_INTL = ATEXT + u"\u0080-\U0010FFFF" +DOT_ATOM_TEXT_INTL = '[' + ATEXT_INTL + ']+(?:\\.[' + ATEXT_INTL + ']+)*' + +# The domain part of the email address, after IDNA (ASCII) encoding, +# must also satisfy the requirements of RFC 952/RFC 1123 which restrict +# the allowed characters of hostnames further. The hyphen cannot be at +# the beginning or end of a *dot-atom component* of a hostname either. +ATEXT_HOSTNAME = r'(?:(?:[a-zA-Z0-9][a-zA-Z0-9\-]*)?[a-zA-Z0-9])' + +# Length constants +# RFC 3696 + errata 1003 + errata 1690 (https://www.rfc-editor.org/errata_search.php?rfc=3696&eid=1690) +# explains the maximum length of an email address is 254 octets. +EMAIL_MAX_LENGTH = 254 +LOCAL_PART_MAX_LENGTH = 64 +DOMAIN_MAX_LENGTH = 255 + +# IANA Special Use Domain Names +# Last Updated 2021-09-21 +# https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.txt +# +# The domain names without dots would be caught by the check that the domain +# name in an email address must have a period, but this list will also catch +# subdomains of these domains, which are also reserved. +SPECIAL_USE_DOMAIN_NAMES = [ + # The "arpa" entry here is consolidated from a lot of arpa subdomains + # for private address (i.e. non-routable IP addresses like 172.16.x.x) + # reverse mapping, plus some other subdomains. Although RFC 6761 says + # that application software should not treat these domains as special, + # they are private-use domains and so cannot have globally deliverable + # email addresses, which is an assumption of this library, and probably + # all of arpa is similarly special-use, so we reject it all. 
+ "arpa", + + # RFC 6761 says applications "SHOULD NOT" treat the "example" domains + # as special, i.e. applications should accept these domains. + # + # The domain "example" alone fails our syntax validation because it + # lacks a dot (we assume no one has an email address on a TLD directly). + # "@example.com/net/org" will currently fail DNS-based deliverability + # checks because IANA publishes a NULL MX for these domains, and + # "@mail.example[.com/net/org]" and other subdomains will fail DNS- + # based deliverability checks because IANA does not publish MX or A + # DNS records for these subdomains. + # "example", # i.e. "wwww.example" + # "example.com", + # "example.net", + # "example.org", + + # RFC 6761 says that applications are permitted to treat this domain + # as special and that DNS should return an immediate negative response, + # so we also immediately reject this domain, which also follows the + # purpose of the domain. + "invalid", + + # RFC 6762 says that applications "may" treat ".local" as special and + # that "name resolution APIs and libraries SHOULD recognize these names + # as special," and since ".local" has no global definition, we reject + # it, as we expect email addresses to be gloally routable. + "local", + + # RFC 6761 says that applications (like this library) are permitted + # to treat "localhost" as special, and since it cannot have a globally + # deliverable email address, we reject it. + "localhost", + + # RFC 7686 says "applications that do not implement the Tor protocol + # SHOULD generate an error upon the use of .onion and SHOULD NOT + # perform a DNS lookup. + "onion", + + # Although RFC 6761 says that application software should not treat + # these domains as special, it also warns users that the address may + # resolve differently in different systems, and therefore it cannot + # have a globally routable email address, which is an assumption of + # this library, so we reject "@test" and "@*.test" addresses, unless + # the test_environment keyword argument is given, to allow their use + # in application-level test environments. These domains will generally + # fail deliverability checks because "test" is not an actual TLD. + "test", +] + +# ease compatibility in type checking +if sys.version_info >= (3,): + unicode_class = str +else: + unicode_class = unicode # noqa: F821 + + # turn regexes to unicode (because 'ur' literals are not allowed in Py3) + ATEXT = ATEXT.decode("ascii") + DOT_ATOM_TEXT = DOT_ATOM_TEXT.decode("ascii") + ATEXT_HOSTNAME = ATEXT_HOSTNAME.decode("ascii") + + +class EmailNotValidError(ValueError): + """Parent class of all exceptions raised by this module.""" + pass + + +class EmailSyntaxError(EmailNotValidError): + """Exception raised when an email address fails validation because of its form.""" + pass + + +class EmailUndeliverableError(EmailNotValidError): + """Exception raised when an email address fails validation because its domain name does not appear deliverable.""" + pass + + +class ValidatedEmail(object): + """The validate_email function returns objects of this type holding the normalized form of the email address + and other information.""" + + """The email address that was passed to validate_email. (If passed as bytes, this will be a string.)""" + original_email = None + + """The normalized email address, which should always be used in preferance to the original address. 
+ The normalized address converts an IDNA ASCII domain name to Unicode, if possible, and performs + Unicode normalization on the local part and on the domain (if originally Unicode). It is the + concatenation of the local_part and domain attributes, separated by an @-sign.""" + email = None + + """The local part of the email address after Unicode normalization.""" + local_part = None + + """The domain part of the email address after Unicode normalization or conversion to + Unicode from IDNA ascii.""" + domain = None + + """If not None, a form of the email address that uses 7-bit ASCII characters only.""" + ascii_email = None + + """If not None, the local part of the email address using 7-bit ASCII characters only.""" + ascii_local_part = None + + """If not None, a form of the domain name that uses 7-bit ASCII characters only.""" + ascii_domain = None + + """If True, the SMTPUTF8 feature of your mail relay will be required to transmit messages + to this address. This flag is True just when ascii_local_part is missing. Otherwise it + is False.""" + smtputf8 = None + + """If a deliverability check is performed and if it succeeds, a list of (priority, domain) + tuples of MX records specified in the DNS for the domain.""" + mx = None + + """If no MX records are actually specified in DNS and instead are inferred, through an obsolete + mechanism, from A or AAAA records, the value is the type of DNS record used instead (`A` or `AAAA`).""" + mx_fallback_type = None + + """Tests use this constructor.""" + def __init__(self, **kwargs): + for k, v in kwargs.items(): + setattr(self, k, v) + + """As a convenience, str(...) on instances of this class returns the normalized address.""" + def __str__(self): + return self.email + + def __repr__(self): + return "<ValidatedEmail {}>".format(self.email) + + """For backwards compatibility, some fields are also exposed through a dict-like interface. Note
Note + that some of the names changed when they became attributes.""" + def __getitem__(self, key): + if key == "email": + return self.email + if key == "email_ascii": + return self.ascii_email + if key == "local": + return self.local_part + if key == "domain": + return self.ascii_domain + if key == "domain_i18n": + return self.domain + if key == "smtputf8": + return self.smtputf8 + if key == "mx": + return self.mx + if key == "mx-fallback": + return self.mx_fallback_type + raise KeyError() + + """Tests use this.""" + def __eq__(self, other): + if not isinstance(other, ValidatedEmail): + return False + return ( + self.email == other.email + and self.local_part == other.local_part + and self.domain == other.domain + and self.ascii_email == other.ascii_email + and self.ascii_local_part == other.ascii_local_part + and self.ascii_domain == other.ascii_domain + and self.smtputf8 == other.smtputf8 + and repr(sorted(self.mx) if self.mx else self.mx) + == repr(sorted(other.mx) if other.mx else other.mx) + and self.mx_fallback_type == other.mx_fallback_type + ) + + """This helps producing the README.""" + def as_constructor(self): + return "ValidatedEmail(" \ + + ",".join("\n {}={}".format( + key, + repr(getattr(self, key))) + for key in ('email', 'local_part', 'domain', + 'ascii_email', 'ascii_local_part', 'ascii_domain', + 'smtputf8', 'mx', 'mx_fallback_type') + ) \ + + ")" + + """Convenience method for accessing ValidatedEmail as a dict""" + def as_dict(self): + return self.__dict__ + + +def __get_length_reason(addr, utf8=False, limit=EMAIL_MAX_LENGTH): + diff = len(addr) - limit + reason = "({}{} character{} too many)" + prefix = "at least " if utf8 else "" + suffix = "s" if diff > 1 else "" + return reason.format(prefix, diff, suffix) + + +def caching_resolver(timeout=DEFAULT_TIMEOUT, cache=None): + resolver = dns.resolver.Resolver() + resolver.cache = cache or dns.resolver.LRUCache() + resolver.lifetime = timeout # timeout, in seconds + return resolver + + +def validate_email( + email, + allow_smtputf8=ALLOW_SMTPUTF8, + allow_empty_local=False, + check_deliverability=CHECK_DELIVERABILITY, + test_environment=TEST_ENVIRONMENT, + timeout=DEFAULT_TIMEOUT, + dns_resolver=None +): + """ + Validates an email address, raising an EmailNotValidError if the address is not valid or returning a dict of + information when the address is valid. The email argument can be a str or a bytes instance, + but if bytes it must be ASCII-only. + """ + + # Allow email to be a str or bytes instance. If bytes, + # it must be ASCII because that's how the bytes work + # on the wire with SMTP. + if not isinstance(email, (str, unicode_class)): + try: + email = email.decode("ascii") + except ValueError: + raise EmailSyntaxError("The email address is not valid ASCII.") + + # At-sign. + parts = email.split('@') + if len(parts) != 2: + raise EmailSyntaxError("The email address is not valid. It must have exactly one @-sign.") + + # Collect return values in this instance. + ret = ValidatedEmail() + ret.original_email = email + + # Validate the email address's local part syntax and get a normalized form. + local_part_info = validate_email_local_part(parts[0], + allow_smtputf8=allow_smtputf8, + allow_empty_local=allow_empty_local) + ret.local_part = local_part_info["local_part"] + ret.ascii_local_part = local_part_info["ascii_local_part"] + ret.smtputf8 = local_part_info["smtputf8"] + + # Validate the email address's domain part syntax and get a normalized form. 
+ domain_part_info = validate_email_domain_part(parts[1], test_environment=test_environment) + ret.domain = domain_part_info["domain"] + ret.ascii_domain = domain_part_info["ascii_domain"] + + # Construct the complete normalized form. + ret.email = ret.local_part + "@" + ret.domain + + # If the email address has an ASCII form, add it. + if not ret.smtputf8: + ret.ascii_email = ret.ascii_local_part + "@" + ret.ascii_domain + + # If the email address has an ASCII representation, then we assume it may be + # transmitted in ASCII (we can't assume SMTPUTF8 will be used on all hops to + # the destination) and the length limit applies to ASCII characters (which is + # the same as octets). The number of characters in the internationalized form + # may be many fewer (because IDNA ASCII is verbose) and could be less than 254 + # Unicode characters, and of course the number of octets over the limit may + # not be the number of characters over the limit, so if the email address is + # internationalized, we can't give any simple information about why the address + # is too long. + # + # In addition, check that the UTF-8 encoding (i.e. not IDNA ASCII and not + # Unicode characters) is at most 254 octets. If the address is transmitted using + # SMTPUTF8, then the length limit probably applies to the UTF-8 encoded octets. + # If the email address has an ASCII form that differs from its internationalized + # form, I don't think the internationalized form can be longer, and so the ASCII + # form length check would be sufficient. If there is no ASCII form, then we have + # to check the UTF-8 encoding. The UTF-8 encoding could be up to about four times + # longer than the number of characters. + # + # See the length checks on the local part and the domain. + if ret.ascii_email and len(ret.ascii_email) > EMAIL_MAX_LENGTH: + if ret.ascii_email == ret.email: + reason = __get_length_reason(ret.ascii_email) + elif len(ret.email) > EMAIL_MAX_LENGTH: + # If there are more than 254 characters, then the ASCII + # form is definitely going to be too long. + reason = __get_length_reason(ret.email, utf8=True) + else: + reason = "(when converted to IDNA ASCII)" + raise EmailSyntaxError("The email address is too long {}.".format(reason)) + if len(ret.email.encode("utf8")) > EMAIL_MAX_LENGTH: + if len(ret.email) > EMAIL_MAX_LENGTH: + # If there are more than 254 characters, then the UTF-8 + # encoding is definitely going to be too long. + reason = __get_length_reason(ret.email, utf8=True) + else: + reason = "(when encoded in bytes)" + raise EmailSyntaxError("The email address is too long {}.".format(reason)) + + if check_deliverability and not test_environment: + # Validate the email address's deliverability using DNS + # and update the return value with metadata. + deliverability_info = validate_email_deliverability( + ret["domain"], ret["domain_i18n"], timeout, dns_resolver + ) + if "mx" in deliverability_info: + ret.mx = deliverability_info["mx"] + ret.mx_fallback_type = deliverability_info["mx-fallback"] + + return ret + + +def validate_email_local_part(local, allow_smtputf8=True, allow_empty_local=False): + # Validates the local part of an email address. + + if len(local) == 0: + if not allow_empty_local: + raise EmailSyntaxError("There must be something before the @-sign.") + else: + # The caller allows an empty local part. Useful for validating certain + # Postfix aliases.
+ return { + "local_part": local, + "ascii_local_part": local, + "smtputf8": False, + } + + # RFC 5321 4.5.3.1.1 + # We're checking the number of characters here. If the local part + # is ASCII-only, then that's the same as bytes (octets). If it's + # internationalized, then the UTF-8 encoding may be longer, but + # that may not be relevant. We will check the total address length + # instead. + if len(local) > LOCAL_PART_MAX_LENGTH: + reason = __get_length_reason(local, limit=LOCAL_PART_MAX_LENGTH) + raise EmailSyntaxError("The email address is too long before the @-sign {}.".format(reason)) + + # Check the local part against the regular expression for the older ASCII requirements. + m = re.match(DOT_ATOM_TEXT + "\\Z", local) + if m: + # Return the local part unchanged and flag that SMTPUTF8 is not needed. + return { + "local_part": local, + "ascii_local_part": local, + "smtputf8": False, + } + + else: + # The local part failed the ASCII check. Now try the extended internationalized requirements. + m = re.match(DOT_ATOM_TEXT_INTL + "\\Z", local) + if not m: + # It's not a valid internationalized address either. Report which characters were not valid. + bad_chars = ', '.join(sorted(set( + unicodedata.name(c, repr(c)) for c in local if not re.match(u"[" + (ATEXT if not allow_smtputf8 else ATEXT_INTL) + u"]", c) + ))) + raise EmailSyntaxError("The email address contains invalid characters before the @-sign: %s." % bad_chars) + + # It would be valid if internationalized characters were allowed by the caller. + if not allow_smtputf8: + raise EmailSyntaxError("Internationalized characters before the @-sign are not supported.") + + # It's valid. + + # RFC 6532 section 3.1 also says that Unicode NFC normalization should be applied, + # so we'll return the normalized local part in the return value. + local = unicodedata.normalize("NFC", local) + + # Check for unsafe characters. + # Some of this may be redundant with the range U+0080 to U+10FFFF that is checked + # by DOT_ATOM_TEXT_INTL. + for i, c in enumerate(local): + category = unicodedata.category(c) + if category[0] in ("L", "N", "P", "S"): + # letters, numbers, punctuation, and symbols are permitted + pass + elif category[0] == "M": + # a combining character in first position would combine with something + # outside of the email address if concatenated to the right, but is + # otherwise permitted + if i == 0: + raise EmailSyntaxError("The email address contains an initial invalid character (%s)." + % unicodedata.name(c, repr(c))) + elif category[0] in ("Z", "C"): + # spaces and line/paragraph characters (Z) and + # control, format, surrogate, private use, and unassigned code points (C) + raise EmailSyntaxError("The email address contains an invalid character (%s)." + % unicodedata.name(c, repr(c))) + else: + # All categories should be handled above, but in case there is something new + # in the future. + raise EmailSyntaxError("The email address contains a character (%s; category %s) that may not be safe." + % (unicodedata.name(c, repr(c)), category)) + + # Try encoding to UTF-8. Failure is possible with some characters like + # surrogate code points, but those are checked above. Still, we don't + # want to have an unhandled exception later. + try: + local.encode("utf8") + except ValueError: + raise EmailSyntaxError("The email address contains an invalid character.") + + # Flag that SMTPUTF8 will be required for deliverability.
+ return { + "local_part": local, + "ascii_local_part": None, # no ASCII form is possible + "smtputf8": True, + } + + +def validate_email_domain_part(domain, test_environment=False): + # Empty? + if len(domain) == 0: + raise EmailSyntaxError("There must be something after the @-sign.") + + # Perform UTS-46 normalization, which includes casefolding, NFC normalization, + # and converting all label separators (the period/full stop, fullwidth full stop, + # ideographic full stop, and halfwidth ideographic full stop) to basic periods. + # It will also raise an exception if there is an invalid character in the input, + # such as "⒈" which is invalid because it would expand to include a period. + try: + domain = idna.uts46_remap(domain, std3_rules=False, transitional=False) + except idna.IDNAError as e: + raise EmailSyntaxError("The domain name %s contains invalid characters (%s)." % (domain, str(e))) + + # Now we can perform basic checks on the use of periods (since equivalent + # symbols have been mapped to periods). These checks are needed because the + # IDNA library doesn't handle well domains that have empty labels (i.e. initial + # dot, trailing dot, or two dots in a row). + if domain.endswith("."): + raise EmailSyntaxError("An email address cannot end with a period.") + if domain.startswith("."): + raise EmailSyntaxError("An email address cannot have a period immediately after the @-sign.") + if ".." in domain: + raise EmailSyntaxError("An email address cannot have two periods in a row.") + + # Regardless of whether international characters are actually used, + # first convert to IDNA ASCII. For ASCII-only domains, the transformation + # does nothing. If internationalized characters are present, the MTA + # must either support SMTPUTF8 or the mail client must convert the + # domain name to IDNA before submission. + # + # Unfortunately this step incorrectly 'fixes' domain names with leading + # periods by removing them, so we have to check for this above. It also gives + # a funky error message ("No input") when there are two periods in a + # row, also checked separately above. + try: + ascii_domain = idna.encode(domain, uts46=False).decode("ascii") + except idna.IDNAError as e: + if "Domain too long" in str(e): + # We can't really be more specific because UTS-46 normalization means + # the length check is applied to a string that is different from the + # one the user supplied. Also I'm not sure if the length check applies + # to the internationalized form, the IDNA ASCII form, or even both! + raise EmailSyntaxError("The email address is too long after the @-sign.") + raise EmailSyntaxError("The domain name %s contains invalid characters (%s)." % (domain, str(e))) + + # We may have been given an IDNA ASCII domain to begin with. Check + # that the domain actually conforms to IDNA. It could look like IDNA + # but not be actual IDNA. For ASCII-only domains, the conversion out + # of IDNA just gives the same thing back. + # + # This gives us the canonical internationalized form of the domain, + # which we should use in all error messages. + try: + domain_i18n = idna.decode(ascii_domain.encode('ascii')) + except idna.IDNAError as e: + raise EmailSyntaxError("The domain name %s is not valid IDNA (%s)." % (ascii_domain, str(e))) + + # RFC 5321 4.5.3.1.2 + # We're checking the number of bytes (octets) here, which can be much + # higher than the number of characters in internationalized domains, + # on the assumption that the domain may be transmitted without SMTPUTF8 + # as IDNA ASCII. 
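+ # (Worked example, added for illustration: "bücher.example" is 14 Unicode + # characters, but its IDNA ASCII form "xn--bcher-kva.example" is 21 octets, + # so the character count and the octet count can diverge.)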
This is also checked by idna.encode, so this exception + # is never reached. + if len(ascii_domain) > DOMAIN_MAX_LENGTH: + raise EmailSyntaxError("The email address is too long after the @-sign.") + + # A "dot atom text", per RFC 2822 3.2.4, but using the restricted + # characters allowed in a hostname (see ATEXT_HOSTNAME above). + DOT_ATOM_TEXT = ATEXT_HOSTNAME + r'(?:\.' + ATEXT_HOSTNAME + r')*' + + # Check the regular expression. This is probably entirely redundant + # with idna.decode, which also checks this format. + m = re.match(DOT_ATOM_TEXT + "\\Z", ascii_domain) + if not m: + raise EmailSyntaxError("The email address contains invalid characters after the @-sign.") + + # All publicly deliverable addresses have domain names with at least + # one period, and we'll consider the lack of a period a syntax error + # since that will match people's sense of what an email address looks + # like. We'll skip this in test environments to allow '@test' email + # addresses. + if "." not in ascii_domain and not (ascii_domain == "test" and test_environment): + raise EmailSyntaxError("The domain name %s is not valid. It should have a period." % domain_i18n) + + # Check special-use and reserved domain names. Raise these as + # deliverability errors since they are syntactically valid. + # Some might fail DNS-based deliverability checks, but that + # can be turned off, so we should fail them all sooner. + for d in SPECIAL_USE_DOMAIN_NAMES: + # See the note near the definition of SPECIAL_USE_DOMAIN_NAMES. + if d == "test" and test_environment: + continue + + if ascii_domain == d or ascii_domain.endswith("." + d): + raise EmailUndeliverableError("The domain name %s is a special-use or reserved name that cannot be used with email." % domain_i18n) + + # We also know that all TLDs currently end with a letter, and + # we'll consider that a non-DNS based deliverability check. + if not re.search(r"[A-Za-z]\Z", ascii_domain): + raise EmailUndeliverableError( + "The domain name %s is not valid. It is not within a valid top-level domain." % domain_i18n + ) + + # Return the IDNA ASCII-encoded form of the domain, which is how it + # would be transmitted on the wire (except when used with SMTPUTF8 + # possibly), as well as the canonical Unicode form of the domain, + # which is better for display purposes. This should also take care + # of RFC 6532 section 3.1's suggestion to apply Unicode NFC + # normalization to addresses. + return { + "ascii_domain": ascii_domain, + "domain": domain_i18n, + } + + +def validate_email_deliverability(domain, domain_i18n, timeout=DEFAULT_TIMEOUT, dns_resolver=None): + # Check that the domain resolves to an MX record. If there is no MX record, + # try an A or AAAA record which is a deprecated fallback for deliverability. + + # If no dns.resolver.Resolver was given, get dnspython's default resolver. + # Override the default resolver's timeout. This may affect other uses of + # dnspython in this process. + if dns_resolver is None: + dns_resolver = dns.resolver.get_default_resolver() + dns_resolver.lifetime = timeout + + def dns_resolver_resolve_shim(domain, record): + try: + # dns.resolver.Resolver.resolve is new to dnspython 2.x. + # https://dnspython.readthedocs.io/en/latest/resolver-class.html#dns.resolver.Resolver.resolve + return dns_resolver.resolve(domain, record) + except AttributeError: + # dnspython 2.x is only available in Python 3.6 and later.
For earlier versions + # of Python, we maintain compatibility with dnspython 1.x, which has a + # dns.resolver.Resolver.query method instead. The only difference is that + # query may treat the domain as relative and use the system's search domains, + # which we prevent by adding a "." to the domain name to make it absolute. + # dns.resolver.Resolver.query is deprecated in dnspython version 2.x. + # https://dnspython.readthedocs.io/en/latest/resolver-class.html#dns.resolver.Resolver.query + return dns_resolver.query(domain + ".", record) + + try: + # We need a way to check how timeouts are handled in the tests. So we + # have a secret variable that if set makes this method always test the + # handling of a timeout. + if getattr(validate_email_deliverability, 'TEST_CHECK_TIMEOUT', False): + raise dns.exception.Timeout() + + try: + # Try resolving for MX records and get them in sorted priority order + # as (priority, qname) pairs. + response = dns_resolver_resolve_shim(domain, "MX") + mtas = sorted([(r.preference, str(r.exchange).rstrip('.')) for r in response]) + mx_fallback = None + + # Do not permit delivery if there is only a "null MX" record (whose value is + # (0, ".") but we've stripped trailing dots, so the 'exchange' is just ""). + mtas = [(preference, exchange) for preference, exchange in mtas + if exchange != ""] + if len(mtas) == 0: + raise EmailUndeliverableError("The domain name %s does not accept email." % domain_i18n) + + except (dns.resolver.NoNameservers, dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + + # If there was no MX record, fall back to an A record. + try: + response = dns_resolver_resolve_shim(domain, "A") + mtas = [(0, str(r)) for r in response] + mx_fallback = "A" + except (dns.resolver.NoNameservers, dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + + # If there was no A record, fall back to an AAAA record. + try: + response = dns_resolver_resolve_shim(domain, "AAAA") + mtas = [(0, str(r)) for r in response] + mx_fallback = "AAAA" + except (dns.resolver.NoNameservers, dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + + # If there was no MX, A, or AAAA record, then mail to + # this domain is not deliverable. + raise EmailUndeliverableError("The domain name %s does not exist." % domain_i18n) + + try: + # Check for an SPF reject-all ("v=spf1 -all") record which indicates + # no emails are sent from this domain, which, like a null MX record, + # would indicate that the domain is not used for email. + response = dns_resolver_resolve_shim(domain, "TXT") + for rec in response: + if str(rec) == "v=spf1 -all": + raise EmailUndeliverableError("The domain name %s does not send email." % domain_i18n) + except dns.resolver.NoAnswer: + # No TXT records means there is no SPF policy, so we cannot take any action. + pass + except (dns.resolver.NoNameservers, dns.resolver.NXDOMAIN): + # Failure to resolve at this step will be ignored. + pass + + except dns.exception.Timeout: + # A timeout could occur for various reasons, so don't treat it as a failure. + return { + "unknown-deliverability": "timeout", + } + + except EmailUndeliverableError: + # Don't let these get clobbered by the wider except block below. + raise + + except Exception as e: + # Unhandled conditions should not propagate.
+ raise EmailUndeliverableError( + "There was an error while checking if the domain name in the email address is deliverable: " + str(e) + ) + + return { + "mx": mtas, + "mx-fallback": mx_fallback, + } + + +def main(): + import json + + def __utf8_input_shim(input_str): + if sys.version_info < (3,): + return input_str.decode("utf-8") + return input_str + + def __utf8_output_shim(output_str): + if sys.version_info < (3,): + return unicode_class(output_str).encode("utf-8") + return output_str + + if len(sys.argv) == 1: + # Validate the email addresses passed line-by-line on STDIN. + dns_resolver = caching_resolver() + for line in sys.stdin: + email = __utf8_input_shim(line.strip()) + try: + validate_email(email, dns_resolver=dns_resolver) + except EmailNotValidError as e: + print(__utf8_output_shim("{} {}".format(email, e))) + else: + # Validate the email address passed on the command line. + email = __utf8_input_shim(sys.argv[1]) + try: + result = validate_email(email) + print(json.dumps(result.as_dict(), indent=2, sort_keys=True, ensure_ascii=False)) + except EmailNotValidError as e: + print(__utf8_output_shim(e)) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.10/site-packages/email_validator/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/email_validator/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..05af2f5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/email_validator/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/LICENSE b/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/LICENSE new file mode 100644 index 0000000..3e92463 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Sebastián Ramírez + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.
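A minimal usage sketch for the email_validator module shown earlier in this diff. It relies only on the API visible above (validate_email, caching_resolver, EmailNotValidError); the sample address is arbitrary, and check_deliverability=False keeps the first call offline:

```Python
from email_validator import validate_email, caching_resolver, EmailNotValidError

try:
    # Syntax-only check: skip the DNS-based deliverability lookup.
    valid = validate_email("tom.smith@example.com", check_deliverability=False)
    print(valid.email)        # normalized form: local_part + "@" + Unicode domain
    print(valid.ascii_email)  # 7-bit ASCII form, or None when SMTPUTF8 is required
except EmailNotValidError as e:
    print(str(e))

# For bulk validation with deliverability checks, share one cached resolver
# (built by caching_resolver above) across calls.
resolver = caching_resolver(timeout=10)
valid = validate_email("tom.smith@example.com", dns_resolver=resolver)
print(valid.mx)  # list of (priority, host) tuples, if MX records were found
```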
diff --git a/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/METADATA b/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/METADATA new file mode 100644 index 0000000..b6ad36e --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/METADATA @@ -0,0 +1,548 @@ +Metadata-Version: 2.1 +Name: fastapi +Version: 0.75.2 +Summary: FastAPI framework, high performance, easy to learn, fast to code, ready for production +Home-page: https://github.com/tiangolo/fastapi +Author: Sebastián Ramírez +Author-email: tiangolo@gmail.com +Requires-Python: >=3.6.1 +Description-Content-Type: text/markdown +Classifier: Intended Audience :: Information Technology +Classifier: Intended Audience :: System Administrators +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development +Classifier: Typing :: Typed +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: FastAPI +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Dist: starlette ==0.17.1 +Requires-Dist: pydantic >=1.6.2,!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<2.0.0 +Requires-Dist: requests >=2.24.0,<3.0.0 ; extra == "all" +Requires-Dist: jinja2 >=2.11.2,<4.0.0 ; extra == "all" +Requires-Dist: python-multipart >=0.0.5,<0.0.6 ; extra == "all" +Requires-Dist: itsdangerous >=1.1.0,<3.0.0 ; extra == "all" +Requires-Dist: pyyaml >=5.3.1,<7.0.0 ; extra == "all" +Requires-Dist: ujson >=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0 ; extra == "all" +Requires-Dist: orjson >=3.2.1,<4.0.0 ; extra == "all" +Requires-Dist: email_validator >=1.1.1,<2.0.0 ; extra == "all" +Requires-Dist: uvicorn[standard] >=0.12.0,<0.18.0 ; extra == "all" +Requires-Dist: python-jose[cryptography] >=3.3.0,<4.0.0 ; extra == "dev" +Requires-Dist: passlib[bcrypt] >=1.7.2,<2.0.0 ; extra == "dev" +Requires-Dist: autoflake >=1.4.0,<2.0.0 ; extra == "dev" +Requires-Dist: flake8 >=3.8.3,<4.0.0 ; extra == "dev" +Requires-Dist: uvicorn[standard] >=0.12.0,<0.18.0 ; extra == "dev" +Requires-Dist: mkdocs >=1.1.2,<2.0.0 ; extra == "doc" +Requires-Dist: mkdocs-material >=8.1.4,<9.0.0 ; extra == "doc" +Requires-Dist: mdx-include >=1.4.1,<2.0.0 ; extra == "doc" +Requires-Dist: mkdocs-markdownextradata-plugin >=0.1.7,<0.3.0 ; extra == "doc" +Requires-Dist: typer >=0.4.1,<0.5.0 ; extra == "doc" +Requires-Dist: pyyaml >=5.3.1,<7.0.0 ; extra == "doc" +Requires-Dist: pytest >=6.2.4,<7.0.0 ; extra == "test" +Requires-Dist: pytest-cov >=2.12.0,<4.0.0 ; extra == "test" +Requires-Dist: mypy ==0.910 ; extra == "test" +Requires-Dist: flake8 >=3.8.3,<4.0.0 ; extra == "test" +Requires-Dist: black == 22.3.0 ; extra == "test" +Requires-Dist: isort 
>=5.0.6,<6.0.0 ; extra == "test" +Requires-Dist: requests >=2.24.0,<3.0.0 ; extra == "test" +Requires-Dist: httpx >=0.14.0,<0.19.0 ; extra == "test" +Requires-Dist: email_validator >=1.1.1,<2.0.0 ; extra == "test" +Requires-Dist: sqlalchemy >=1.3.18,<1.5.0 ; extra == "test" +Requires-Dist: peewee >=3.13.3,<4.0.0 ; extra == "test" +Requires-Dist: databases[sqlite] >=0.3.2,<0.6.0 ; extra == "test" +Requires-Dist: orjson >=3.2.1,<4.0.0 ; extra == "test" +Requires-Dist: ujson >=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0 ; extra == "test" +Requires-Dist: python-multipart >=0.0.5,<0.0.6 ; extra == "test" +Requires-Dist: flask >=1.1.2,<3.0.0 ; extra == "test" +Requires-Dist: anyio[trio] >=3.2.1,<4.0.0 ; extra == "test" +Requires-Dist: types-ujson ==4.2.1 ; extra == "test" +Requires-Dist: types-orjson ==3.6.2 ; extra == "test" +Requires-Dist: types-dataclasses ==0.6.5 ; extra == "test" and ( python_version<'3.7') +Project-URL: Documentation, https://fastapi.tiangolo.com/ +Provides-Extra: all +Provides-Extra: dev +Provides-Extra: doc +Provides-Extra: test + +
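The Provides-Extra names above pair with the `extra == "..."` markers on the Requires-Dist lines: pip installs those dependencies only when the matching extra is requested. For example (the version pin is added here only for illustration):

```console
$ pip install "fastapi==0.75.2"
$ pip install "fastapi[all]==0.75.2"
```

The first command installs only the core dependencies (starlette, pydantic); the second also pulls every dependency marked `extra == "all"`.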

+ [FastAPI logo]
+
+ FastAPI framework, high performance, easy to learn, fast to code, ready for production
+
+ [badges: Test | Coverage | Package version | Supported Python versions]
+ +--- + +**Documentation**: https://fastapi.tiangolo.com + +**Source Code**: https://github.com/tiangolo/fastapi + +--- + +FastAPI is a modern, fast (high-performance), web framework for building APIs with Python 3.6+ based on standard Python type hints. + +The key features are: + +* **Fast**: Very high performance, on par with **NodeJS** and **Go** (thanks to Starlette and Pydantic). [One of the fastest Python frameworks available](#performance). + +* **Fast to code**: Increase the speed to develop features by about 200% to 300%. * +* **Fewer bugs**: Reduce about 40% of human (developer) induced errors. * +* **Intuitive**: Great editor support. Completion everywhere. Less time debugging. +* **Easy**: Designed to be easy to use and learn. Less time reading docs. +* **Short**: Minimize code duplication. Multiple features from each parameter declaration. Fewer bugs. +* **Robust**: Get production-ready code. With automatic interactive documentation. +* **Standards-based**: Based on (and fully compatible with) the open standards for APIs: OpenAPI (previously known as Swagger) and JSON Schema. + +* estimation based on tests on an internal development team, building production applications. + +## Sponsors + +[sponsor logos] + +Other sponsors + +## Opinions + +"_[...] I'm using **FastAPI** a ton these days. [...] I'm actually planning to use it for all of my team's **ML services at Microsoft**. Some of them are getting integrated into the core **Windows** product and some **Office** products._" + +
    Kabir Khan - Microsoft (ref)
    + +--- + +"_We adopted the **FastAPI** library to spawn a **REST** server that can be queried to obtain **predictions**. [for Ludwig]_" + +
    Piero Molino, Yaroslav Dudin, and Sai Sumanth Miryala - Uber (ref)
    + +--- + +"_**Netflix** is pleased to announce the open-source release of our **crisis management** orchestration framework: **Dispatch**! [built with **FastAPI**]_" + +
    Kevin Glisson, Marc Vilanova, Forest Monsen - Netflix (ref)
    + +--- + +"_I’m over the moon excited about **FastAPI**. It’s so fun!_" + +
    Brian Okken - Python Bytes podcast host (ref)
    + +--- + +"_Honestly, what you've built looks super solid and polished. In many ways, it's what I wanted **Hug** to be - it's really inspiring to see someone build that._" + +
    Timothy Crosley - Hug creator (ref)
    + +--- + +"_If you're looking to learn one **modern framework** for building REST APIs, check out **FastAPI** [...] It's fast, easy to use and easy to learn [...]_" + +"_We've switched over to **FastAPI** for our **APIs** [...] I think you'll like it [...]_" + +
    Ines Montani - Matthew Honnibal - Explosion AI founders - spaCy creators (ref) - (ref)
+ +--- + +## **Typer**, the FastAPI of CLIs + +[Typer logo] + +If you are building a CLI app to be used in the terminal instead of a web API, check out **Typer**. + +**Typer** is FastAPI's little sibling. And it's intended to be the **FastAPI of CLIs**. ⌨️ 🚀 + +## Requirements + +Python 3.6+ + +FastAPI stands on the shoulders of giants: + +* Starlette for the web parts. +* Pydantic for the data parts. + +## Installation + +
    + +```console +$ pip install fastapi + +---> 100% +``` + +
    + +You will also need an ASGI server, for production such as Uvicorn or Hypercorn. + +
    + +```console +$ pip install "uvicorn[standard]" + +---> 100% +``` + +
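Hypercorn, mentioned above as the alternative ASGI server, would be installed and pointed at the same app object. This invocation is illustrative, based on Hypercorn's own CLI rather than anything in this README, and assumes the `main.py` from the example below exists:

```console
$ pip install hypercorn
$ hypercorn main:app
```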
    + +## Example + +### Create it + +* Create a file `main.py` with: + +```Python +from typing import Optional + +from fastapi import FastAPI + +app = FastAPI() + + +@app.get("/") +def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +def read_item(item_id: int, q: Optional[str] = None): + return {"item_id": item_id, "q": q} +``` + +
    +Or use async def... + +If your code uses `async` / `await`, use `async def`: + +```Python hl_lines="9 14" +from typing import Optional + +from fastapi import FastAPI + +app = FastAPI() + + +@app.get("/") +async def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +async def read_item(item_id: int, q: Optional[str] = None): + return {"item_id": item_id, "q": q} +``` + +**Note**: + +If you don't know, check the _"In a hurry?"_ section about `async` and `await` in the docs. + +
    + +### Run it + +Run the server with: + +
    + +```console +$ uvicorn main:app --reload + +INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) +INFO: Started reloader process [28720] +INFO: Started server process [28722] +INFO: Waiting for application startup. +INFO: Application startup complete. +``` + +
    + +
    +About the command uvicorn main:app --reload... + +The command `uvicorn main:app` refers to: + +* `main`: the file `main.py` (the Python "module"). +* `app`: the object created inside of `main.py` with the line `app = FastAPI()`. +* `--reload`: make the server restart after code changes. Only do this for development. + +
    + +### Check it + +Open your browser at http://127.0.0.1:8000/items/5?q=somequery. + +You will see the JSON response as: + +```JSON +{"item_id": 5, "q": "somequery"} +``` + +You already created an API that: + +* Receives HTTP requests in the _paths_ `/` and `/items/{item_id}`. +* Both _paths_ take `GET` operations (also known as HTTP _methods_). +* The _path_ `/items/{item_id}` has a _path parameter_ `item_id` that should be an `int`. +* The _path_ `/items/{item_id}` has an optional `str` _query parameter_ `q`. + +### Interactive API docs + +Now go to http://127.0.0.1:8000/docs. + +You will see the automatic interactive API documentation (provided by Swagger UI): + +![Swagger UI](https://fastapi.tiangolo.com/img/index/index-01-swagger-ui-simple.png) + +### Alternative API docs + +And now, go to http://127.0.0.1:8000/redoc. + +You will see the alternative automatic documentation (provided by ReDoc): + +![ReDoc](https://fastapi.tiangolo.com/img/index/index-02-redoc-simple.png) + +## Example upgrade + +Now modify the file `main.py` to receive a body from a `PUT` request. + +Declare the body using standard Python types, thanks to Pydantic. + +```Python hl_lines="4 9-12 25-27" +from typing import Optional + +from fastapi import FastAPI +from pydantic import BaseModel + +app = FastAPI() + + +class Item(BaseModel): + name: str + price: float + is_offer: Optional[bool] = None + + +@app.get("/") +def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +def read_item(item_id: int, q: Optional[str] = None): + return {"item_id": item_id, "q": q} + + +@app.put("/items/{item_id}") +def update_item(item_id: int, item: Item): + return {"item_name": item.name, "item_id": item_id} +``` + +The server should reload automatically (because you added `--reload` to the `uvicorn` command above). + +### Interactive API docs upgrade + +Now go to http://127.0.0.1:8000/docs. + +* The interactive API documentation will be automatically updated, including the new body: + +![Swagger UI](https://fastapi.tiangolo.com/img/index/index-03-swagger-02.png) + +* Click on the button "Try it out", it allows you to fill the parameters and directly interact with the API: + +![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-04-swagger-03.png) + +* Then click on the "Execute" button, the user interface will communicate with your API, send the parameters, get the results and show them on the screen: + +![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-05-swagger-04.png) + +### Alternative API docs upgrade + +And now, go to http://127.0.0.1:8000/redoc. + +* The alternative documentation will also reflect the new query parameter and body: + +![ReDoc](https://fastapi.tiangolo.com/img/index/index-06-redoc-02.png) + +### Recap + +In summary, you declare **once** the types of parameters, body, etc. as function parameters. + +You do that with standard modern Python types. + +You don't have to learn a new syntax, the methods or classes of a specific library, etc. + +Just standard **Python 3.6+**. + +For example, for an `int`: + +```Python +item_id: int +``` + +or for a more complex `Item` model: + +```Python +item: Item +``` + +...and with that single declaration you get: + +* Editor support, including: + * Completion. + * Type checks. +* Validation of data: + * Automatic and clear errors when the data is invalid. + * Validation even for deeply nested JSON objects. +* Conversion of input data: coming from the network to Python data and types. Reading from: + * JSON. 
+ * Path parameters. + * Query parameters. + * Cookies. + * Headers. + * Forms. + * Files. +* Conversion of output data: converting from Python data and types to network data (as JSON): + * Convert Python types (`str`, `int`, `float`, `bool`, `list`, etc). + * `datetime` objects. + * `UUID` objects. + * Database models. + * ...and many more. +* Automatic interactive API documentation, including 2 alternative user interfaces: + * Swagger UI. + * ReDoc. + +--- + +Coming back to the previous code example, **FastAPI** will: + +* Validate that there is an `item_id` in the path for `GET` and `PUT` requests. +* Validate that the `item_id` is of type `int` for `GET` and `PUT` requests. + * If it is not, the client will see a useful, clear error. +* Check if there is an optional query parameter named `q` (as in `http://127.0.0.1:8000/items/foo?q=somequery`) for `GET` requests. + * As the `q` parameter is declared with `= None`, it is optional. + * Without the `None` it would be required (as is the body in the case with `PUT`). +* For `PUT` requests to `/items/{item_id}`, Read the body as JSON: + * Check that it has a required attribute `name` that should be a `str`. + * Check that it has a required attribute `price` that has to be a `float`. + * Check that it has an optional attribute `is_offer`, that should be a `bool`, if present. + * All this would also work for deeply nested JSON objects. +* Convert from and to JSON automatically. +* Document everything with OpenAPI, that can be used by: + * Interactive documentation systems. + * Automatic client code generation systems, for many languages. +* Provide 2 interactive documentation web interfaces directly. + +--- + +We just scratched the surface, but you already get the idea of how it all works. + +Try changing the line with: + +```Python + return {"item_name": item.name, "item_id": item_id} +``` + +...from: + +```Python + ... "item_name": item.name ... +``` + +...to: + +```Python + ... "item_price": item.price ... +``` + +...and see how your editor will auto-complete the attributes and know their types: + +![editor support](https://fastapi.tiangolo.com/img/vscode-completion.png) + +For a more complete example including more features, see the Tutorial - User Guide. + +**Spoiler alert**: the tutorial - user guide includes: + +* Declaration of **parameters** from other different places as: **headers**, **cookies**, **form fields** and **files**. +* How to set **validation constraints** as `maximum_length` or `regex`. +* A very powerful and easy to use **Dependency Injection** system. +* Security and authentication, including support for **OAuth2** with **JWT tokens** and **HTTP Basic** auth. +* More advanced (but equally easy) techniques for declaring **deeply nested JSON models** (thanks to Pydantic). +* **GraphQL** integration with Strawberry and other libraries. +* Many extra features (thanks to Starlette) as: + * **WebSockets** + * extremely easy tests based on `requests` and `pytest` + * **CORS** + * **Cookie Sessions** + * ...and more. + +## Performance + +Independent TechEmpower benchmarks show **FastAPI** applications running under Uvicorn as one of the fastest Python frameworks available, only below Starlette and Uvicorn themselves (used internally by FastAPI). (*) + +To understand more about it, see the section Benchmarks. + +## Optional Dependencies + +Used by Pydantic: + +* ujson - for faster JSON "parsing". +* email_validator - for email validation. + +Used by Starlette: + +* requests - Required if you want to use the `TestClient`. 
+* jinja2 - Required if you want to use the default template configuration. +* python-multipart - Required if you want to support form "parsing", with `request.form()`. +* itsdangerous - Required for `SessionMiddleware` support. +* pyyaml - Required for Starlette's `SchemaGenerator` support (you probably don't need it with FastAPI). +* ujson - Required if you want to use `UJSONResponse`. + +Used by FastAPI / Starlette: + +* uvicorn - for the server that loads and serves your application. +* orjson - Required if you want to use `ORJSONResponse`. + +You can install all of these with `pip install "fastapi[all]"`. + +## License + +This project is licensed under the terms of the MIT license. + diff --git a/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/RECORD b/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/RECORD new file mode 100644 index 0000000..c1f86cb --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/RECORD @@ -0,0 +1,91 @@ +fastapi-0.75.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +fastapi-0.75.2.dist-info/LICENSE,sha256=Tsif_IFIW5f-xYSy1KlhAy7v_oNEU4lP2cEnSQbMdE4,1086 +fastapi-0.75.2.dist-info/METADATA,sha256=DpQsMahfe-73ElLD9C7B-LDLltqSR3dcnDWB5cBUPAQ,24531 +fastapi-0.75.2.dist-info/RECORD,, +fastapi-0.75.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi-0.75.2.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81 +fastapi/__init__.py,sha256=wIuEHeKRUob_GzohEmBRH1C2RjxDTz_qRWCd3_Bvn3Q,1015 +fastapi/__pycache__/__init__.cpython-310.pyc,, +fastapi/__pycache__/applications.cpython-310.pyc,, +fastapi/__pycache__/background.cpython-310.pyc,, +fastapi/__pycache__/concurrency.cpython-310.pyc,, +fastapi/__pycache__/datastructures.cpython-310.pyc,, +fastapi/__pycache__/encoders.cpython-310.pyc,, +fastapi/__pycache__/exception_handlers.cpython-310.pyc,, +fastapi/__pycache__/exceptions.cpython-310.pyc,, +fastapi/__pycache__/logger.cpython-310.pyc,, +fastapi/__pycache__/param_functions.cpython-310.pyc,, +fastapi/__pycache__/params.cpython-310.pyc,, +fastapi/__pycache__/requests.cpython-310.pyc,, +fastapi/__pycache__/responses.cpython-310.pyc,, +fastapi/__pycache__/routing.cpython-310.pyc,, +fastapi/__pycache__/staticfiles.cpython-310.pyc,, +fastapi/__pycache__/templating.cpython-310.pyc,, +fastapi/__pycache__/testclient.cpython-310.pyc,, +fastapi/__pycache__/types.cpython-310.pyc,, +fastapi/__pycache__/utils.cpython-310.pyc,, +fastapi/__pycache__/websockets.cpython-310.pyc,, +fastapi/applications.py,sha256=B9oxYaliDut9i52xPJKUFQadJfwjsBGttMZ9et1Dch4,38218 +fastapi/background.py,sha256=HtN5_pJJrOdalSbuGSMKJAPNWUU5h7rY_BXXubu7-IQ,76 +fastapi/concurrency.py,sha256=ne71p0K426vPxvI0_H7mcs3nr2_8lkXyvEIfbHZiCXg,1072 +fastapi/datastructures.py,sha256=oW6xuU0C-sBwbcyXI-MlBO0tSS4BSPB2lYUa1yCw8-A,1905 +fastapi/dependencies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/dependencies/__pycache__/__init__.cpython-310.pyc,, +fastapi/dependencies/__pycache__/models.cpython-310.pyc,, +fastapi/dependencies/__pycache__/utils.cpython-310.pyc,, +fastapi/dependencies/models.py,sha256=zNbioxICuOeb-9ADDVQ45hUHOC0PBtPVEfVU3f1l_nc,2494 +fastapi/dependencies/utils.py,sha256=u1HxNFbKpsnltqRPKxW8cwbcesjBx0WE73txN3HVqLo,27453 +fastapi/encoders.py,sha256=P06VLnm9CVmOJzmgwBsqqjV8kCOe5IlEoice7yhBhCc,5451 +fastapi/exception_handlers.py,sha256=UVYCCe4qt5-5_NuQ3SxTXjDvOdKMHiTfcLp3RUKXhg8,912 +fastapi/exceptions.py,sha256=8B4f4gmHUVaX04L9IxxfEbUzX6OhJy4y6-utQbqNX0Q,1131 
+fastapi/logger.py,sha256=I9NNi3ov8AcqbsbC9wl1X-hdItKgYt2XTrx1f99Zpl4,54 +fastapi/middleware/__init__.py,sha256=oQDxiFVcc1fYJUOIFvphnK7pTT5kktmfL32QXpBFvvo,58 +fastapi/middleware/__pycache__/__init__.cpython-310.pyc,, +fastapi/middleware/__pycache__/asyncexitstack.cpython-310.pyc,, +fastapi/middleware/__pycache__/cors.cpython-310.pyc,, +fastapi/middleware/__pycache__/gzip.cpython-310.pyc,, +fastapi/middleware/__pycache__/httpsredirect.cpython-310.pyc,, +fastapi/middleware/__pycache__/trustedhost.cpython-310.pyc,, +fastapi/middleware/__pycache__/wsgi.cpython-310.pyc,, +fastapi/middleware/asyncexitstack.py,sha256=72XjQmQ_tB_tTs9xOc0akXF_7TwZUPdyfc8gsN5LV8E,1197 +fastapi/middleware/cors.py,sha256=ynwjWQZoc_vbhzZ3_ZXceoaSrslHFHPdoM52rXr0WUU,79 +fastapi/middleware/gzip.py,sha256=xM5PcsH8QlAimZw4VDvcmTnqQamslThsfe3CVN2voa0,79 +fastapi/middleware/httpsredirect.py,sha256=rL8eXMnmLijwVkH7_400zHri1AekfeBd6D6qs8ix950,115 +fastapi/middleware/trustedhost.py,sha256=eE5XGRxGa7c5zPnMJDGp3BxaL25k5iVQlhnv-Pk0Pss,109 +fastapi/middleware/wsgi.py,sha256=Z3Ue-7wni4lUZMvH3G9ek__acgYdJstbnpZX_HQAboY,79 +fastapi/openapi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/openapi/__pycache__/__init__.cpython-310.pyc,, +fastapi/openapi/__pycache__/constants.cpython-310.pyc,, +fastapi/openapi/__pycache__/docs.cpython-310.pyc,, +fastapi/openapi/__pycache__/models.cpython-310.pyc,, +fastapi/openapi/__pycache__/utils.cpython-310.pyc,, +fastapi/openapi/constants.py,sha256=sJSpZzRp7Kky9R-jucU-K6_pJzLBRO75ddW7-MixZWc,166 +fastapi/openapi/docs.py,sha256=rmQoCEcbfz-RJ3e3CH0Odr5hVGtcN8OkdHTkoW8TVBc,5934 +fastapi/openapi/models.py,sha256=NIOaWuxQK7WfSuX2VXMCN6dJv6YxOD611sHi3OOgJpc,10959 +fastapi/openapi/utils.py,sha256=cAXJrTOtg47Qfyjwvvhb1DG0bULIn8Xw5ZUtjopxHPw,18373 +fastapi/param_functions.py,sha256=1OLvO0gdd08uNN9FfQ-LPIfTgtlulFcoooGw_24qJdM,7389 +fastapi/params.py,sha256=sdneBjY1qaNzGf-Att4UG6OFIVNa9E21YPy1wFmuuco,10464 +fastapi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/requests.py,sha256=zayepKFcienBllv3snmWI20Gk0oHNVLU4DDhqXBb4LU,142 +fastapi/responses.py,sha256=K9Gzj0E5GfwTgLiuCE-BuiXn9JdqGJHeXBMkvd8lFC8,1196 +fastapi/routing.py,sha256=3I5FY20zyAoYmFKW9238wf3w5ucAqV8T5YSHaDq1xek,53475 +fastapi/security/__init__.py,sha256=bO8pNmxqVRXUjfl2mOKiVZLn0FpBQ61VUYVjmppnbJw,881 +fastapi/security/__pycache__/__init__.cpython-310.pyc,, +fastapi/security/__pycache__/api_key.cpython-310.pyc,, +fastapi/security/__pycache__/base.cpython-310.pyc,, +fastapi/security/__pycache__/http.cpython-310.pyc,, +fastapi/security/__pycache__/oauth2.cpython-310.pyc,, +fastapi/security/__pycache__/open_id_connect_url.cpython-310.pyc,, +fastapi/security/__pycache__/utils.cpython-310.pyc,, +fastapi/security/api_key.py,sha256=NbVpS9TxDOaipoZa8-SREHyMtTcM3bmy5szMiQxEX9s,2793 +fastapi/security/base.py,sha256=dl4pvbC-RxjfbWgPtCWd8MVU-7CB2SZ22rJDXVCXO6c,141 +fastapi/security/http.py,sha256=ZSy3DFKFDLa3-I4vwsY1r8hQB_VrtAXw4-fMJauZIK0,5984 +fastapi/security/oauth2.py,sha256=xkbUW0b-G4aiEhSO7BZyE7iAdYK41cXB-SL1MnQvBh4,8183 +fastapi/security/open_id_connect_url.py,sha256=iikzuJCz_DG44Q77VrupqSoCbJYaiXkuo_W-kdmAzeo,1145 +fastapi/security/utils.py,sha256=izlh-HBaL1VnJeOeRTQnyNgI3hgTFs73eCyLy-snb4A,266 +fastapi/staticfiles.py,sha256=iirGIt3sdY2QZXd36ijs3Cj-T0FuGFda3cd90kM9Ikw,69 +fastapi/templating.py,sha256=4zsuTWgcjcEainMJFAlW6-gnslm6AgOS1SiiDWfmQxk,76 +fastapi/testclient.py,sha256=nBvaAmX66YldReJNZXPOk1sfuo2Q6hs8bOvIaCep6LQ,66 +fastapi/types.py,sha256=r6MngTHzkZOP9lzXgduje9yeZe5EInWAzCLuRJlhIuE,118 
+fastapi/utils.py,sha256=cHfKtgDwGvh31c6OaysiGnfpTWN41TRNuVbLrDPZixg,6169 +fastapi/websockets.py,sha256=SroIkqE-lfChvtRP3mFaNKKtD6TmePDWBZtQfgM4noo,148 diff --git a/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/REQUESTED b/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/WHEEL b/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/WHEEL new file mode 100644 index 0000000..668ba4d --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi-0.75.2.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.7.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/fastapi/__init__.py b/venv/lib/python3.10/site-packages/fastapi/__init__.py new file mode 100644 index 0000000..22d8e51 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/__init__.py @@ -0,0 +1,24 @@ +"""FastAPI framework, high performance, easy to learn, fast to code, ready for production""" + +__version__ = "0.75.2" + +from starlette import status as status + +from .applications import FastAPI as FastAPI +from .background import BackgroundTasks as BackgroundTasks +from .datastructures import UploadFile as UploadFile +from .exceptions import HTTPException as HTTPException +from .param_functions import Body as Body +from .param_functions import Cookie as Cookie +from .param_functions import Depends as Depends +from .param_functions import File as File +from .param_functions import Form as Form +from .param_functions import Header as Header +from .param_functions import Path as Path +from .param_functions import Query as Query +from .param_functions import Security as Security +from .requests import Request as Request +from .responses import Response as Response +from .routing import APIRouter as APIRouter +from .websockets import WebSocket as WebSocket +from .websockets import WebSocketDisconnect as WebSocketDisconnect diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..79711c2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/applications.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/applications.cpython-310.pyc new file mode 100644 index 0000000..bafe054 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/applications.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/background.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/background.cpython-310.pyc new file mode 100644 index 0000000..d80eb5a Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/background.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/concurrency.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/concurrency.cpython-310.pyc new file mode 100644 index 0000000..d384390 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/concurrency.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/datastructures.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/fastapi/__pycache__/datastructures.cpython-310.pyc new file mode 100644 index 0000000..855d691 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/datastructures.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/encoders.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/encoders.cpython-310.pyc new file mode 100644 index 0000000..f55b9cb Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/encoders.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/exception_handlers.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/exception_handlers.cpython-310.pyc new file mode 100644 index 0000000..0057976 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/exception_handlers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000..9b7b1ff Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/logger.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/logger.cpython-310.pyc new file mode 100644 index 0000000..2fa45e1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/logger.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/param_functions.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/param_functions.cpython-310.pyc new file mode 100644 index 0000000..58c2437 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/param_functions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/params.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/params.cpython-310.pyc new file mode 100644 index 0000000..df4e993 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/params.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/requests.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/requests.cpython-310.pyc new file mode 100644 index 0000000..9c60a39 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/requests.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/responses.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/responses.cpython-310.pyc new file mode 100644 index 0000000..da1c2df Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/responses.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/routing.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/routing.cpython-310.pyc new file mode 100644 index 0000000..e50ca28 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/routing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/staticfiles.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/staticfiles.cpython-310.pyc new file mode 100644 index 0000000..cf447df Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/fastapi/__pycache__/staticfiles.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/templating.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/templating.cpython-310.pyc new file mode 100644 index 0000000..8a22f3e Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/templating.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/testclient.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/testclient.cpython-310.pyc new file mode 100644 index 0000000..e67267e Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/testclient.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/types.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/types.cpython-310.pyc new file mode 100644 index 0000000..6f79b2a Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/types.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000..915bc2c Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/__pycache__/websockets.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/__pycache__/websockets.cpython-310.pyc new file mode 100644 index 0000000..7a1c0d2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/__pycache__/websockets.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/applications.py b/venv/lib/python3.10/site-packages/fastapi/applications.py new file mode 100644 index 0000000..132a94c --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/applications.py @@ -0,0 +1,863 @@ +from enum import Enum +from typing import Any, Callable, Coroutine, Dict, List, Optional, Sequence, Type, Union + +from fastapi import routing +from fastapi.datastructures import Default, DefaultPlaceholder +from fastapi.encoders import DictIntStrAny, SetIntStr +from fastapi.exception_handlers import ( + http_exception_handler, + request_validation_exception_handler, +) +from fastapi.exceptions import RequestValidationError +from fastapi.logger import logger +from fastapi.middleware.asyncexitstack import AsyncExitStackMiddleware +from fastapi.openapi.docs import ( + get_redoc_html, + get_swagger_ui_html, + get_swagger_ui_oauth2_redirect_html, +) +from fastapi.openapi.utils import get_openapi +from fastapi.params import Depends +from fastapi.types import DecoratedCallable +from fastapi.utils import generate_unique_id +from starlette.applications import Starlette +from starlette.datastructures import State +from starlette.exceptions import ExceptionMiddleware, HTTPException +from starlette.middleware import Middleware +from starlette.middleware.errors import ServerErrorMiddleware +from starlette.requests import Request +from starlette.responses import HTMLResponse, JSONResponse, Response +from starlette.routing import BaseRoute +from starlette.types import ASGIApp, Receive, Scope, Send + + +class FastAPI(Starlette): + def __init__( + self, + *, + debug: bool = False, + routes: Optional[List[BaseRoute]] = None, + title: str = "FastAPI", + description: str = "", + version: str = "0.1.0", + openapi_url: 
Optional[str] = "/openapi.json", + openapi_tags: Optional[List[Dict[str, Any]]] = None, + servers: Optional[List[Dict[str, Union[str, Any]]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + default_response_class: Type[Response] = Default(JSONResponse), + docs_url: Optional[str] = "/docs", + redoc_url: Optional[str] = "/redoc", + swagger_ui_oauth2_redirect_url: Optional[str] = "/docs/oauth2-redirect", + swagger_ui_init_oauth: Optional[Dict[str, Any]] = None, + middleware: Optional[Sequence[Middleware]] = None, + exception_handlers: Optional[ + Dict[ + Union[int, Type[Exception]], + Callable[[Request, Any], Coroutine[Any, Any, Response]], + ] + ] = None, + on_startup: Optional[Sequence[Callable[[], Any]]] = None, + on_shutdown: Optional[Sequence[Callable[[], Any]]] = None, + terms_of_service: Optional[str] = None, + contact: Optional[Dict[str, Union[str, Any]]] = None, + license_info: Optional[Dict[str, Union[str, Any]]] = None, + openapi_prefix: str = "", + root_path: str = "", + root_path_in_servers: bool = True, + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + callbacks: Optional[List[BaseRoute]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + swagger_ui_parameters: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + **extra: Any, + ) -> None: + self._debug: bool = debug + self.title = title + self.description = description + self.version = version + self.terms_of_service = terms_of_service + self.contact = contact + self.license_info = license_info + self.openapi_url = openapi_url + self.openapi_tags = openapi_tags + self.root_path_in_servers = root_path_in_servers + self.docs_url = docs_url + self.redoc_url = redoc_url + self.swagger_ui_oauth2_redirect_url = swagger_ui_oauth2_redirect_url + self.swagger_ui_init_oauth = swagger_ui_init_oauth + self.swagger_ui_parameters = swagger_ui_parameters + self.servers = servers or [] + self.extra = extra + self.openapi_version = "3.0.2" + self.openapi_schema: Optional[Dict[str, Any]] = None + if self.openapi_url: + assert self.title, "A title must be provided for OpenAPI, e.g.: 'My API'" + assert self.version, "A version must be provided for OpenAPI, e.g.: '2.1.0'" + # TODO: remove when discarding the openapi_prefix parameter + if openapi_prefix: + logger.warning( + '"openapi_prefix" has been deprecated in favor of "root_path", which ' + "follows more closely the ASGI standard, is simpler, and more " + "automatic. 
Check the docs at " + "https://fastapi.tiangolo.com/advanced/sub-applications/" + ) + self.root_path = root_path or openapi_prefix + self.state: State = State() + self.dependency_overrides: Dict[Callable[..., Any], Callable[..., Any]] = {} + self.router: routing.APIRouter = routing.APIRouter( + routes=routes, + dependency_overrides_provider=self, + on_startup=on_startup, + on_shutdown=on_shutdown, + default_response_class=default_response_class, + dependencies=dependencies, + callbacks=callbacks, + deprecated=deprecated, + include_in_schema=include_in_schema, + responses=responses, + generate_unique_id_function=generate_unique_id_function, + ) + self.exception_handlers: Dict[ + Union[int, Type[Exception]], + Callable[[Request, Any], Coroutine[Any, Any, Response]], + ] = ( + {} if exception_handlers is None else dict(exception_handlers) + ) + self.exception_handlers.setdefault(HTTPException, http_exception_handler) + self.exception_handlers.setdefault( + RequestValidationError, request_validation_exception_handler + ) + + self.user_middleware: List[Middleware] = ( + [] if middleware is None else list(middleware) + ) + self.middleware_stack: ASGIApp = self.build_middleware_stack() + self.setup() + + def build_middleware_stack(self) -> ASGIApp: + # Duplicate/override from Starlette to add AsyncExitStackMiddleware + # inside of ExceptionMiddleware, inside of custom user middlewares + debug = self.debug + error_handler = None + exception_handlers = {} + + for key, value in self.exception_handlers.items(): + if key in (500, Exception): + error_handler = value + else: + exception_handlers[key] = value + + middleware = ( + [Middleware(ServerErrorMiddleware, handler=error_handler, debug=debug)] + + self.user_middleware + + [ + Middleware( + ExceptionMiddleware, handlers=exception_handlers, debug=debug + ), + # Add FastAPI-specific AsyncExitStackMiddleware for dependencies with + # contextvars. + # This needs to happen after user middlewares because those create a + # new contextvars context copy by using a new AnyIO task group. + # The initial part of dependencies with yield is executed in the + # FastAPI code, inside all the middlewares, but the teardown part + # (after yield) is executed in the AsyncExitStack in this middleware, + # if the AsyncExitStack lived outside of the custom middlewares and + # contextvars were set in a dependency with yield in that internal + # contextvars context, the values would not be available in the + # outside context of the AsyncExitStack. + # By putting the middleware and the AsyncExitStack here, inside all + # user middlewares, the code before and after yield in dependencies + # with yield is executed in the same contextvars context, so all values + # set in contextvars before yield is still available after yield as + # would be expected. + # Additionally, by having this AsyncExitStack here, after the + # ExceptionMiddleware, now dependencies can catch handled exceptions, + # e.g. HTTPException, to customize the teardown code (e.g. DB session + # rollback). 
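
# --- Editor's sketch (not part of the vendored file): the wrapping order that the
# --- comment above describes, reproduced with plain callables. The tags below are
# --- hypothetical stand-ins for ServerErrorMiddleware, user middleware,
# --- ExceptionMiddleware, and AsyncExitStackMiddleware; only the reversed()
# --- wrapping pattern is taken from build_middleware_stack().

def make_layer(tag):
    def wrap(app):
        def layer(request):
            return f"{tag}({app(request)})"
        return layer
    return wrap

def router(request):
    return f"router({request})"

middleware = [
    make_layer("server_error"),  # listed first -> ends up outermost
    make_layer("user_mw"),       # user middlewares sit in between
    make_layer("exception"),
    make_layer("exit_stack"),    # listed last -> innermost, right next to the router
]

app = router
for wrap in reversed(middleware):
    app = wrap(app)

# Because wrapping runs in reverse, the first-listed layer sees everything that
# happens inside, including teardown in the exit-stack layer:
print(app("req"))  # server_error(user_mw(exception(exit_stack(router(req)))))
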
+ Middleware(AsyncExitStackMiddleware), + ] + ) + + app = self.router + for cls, options in reversed(middleware): + app = cls(app=app, **options) + return app + + def openapi(self) -> Dict[str, Any]: + if not self.openapi_schema: + self.openapi_schema = get_openapi( + title=self.title, + version=self.version, + openapi_version=self.openapi_version, + description=self.description, + terms_of_service=self.terms_of_service, + contact=self.contact, + license_info=self.license_info, + routes=self.routes, + tags=self.openapi_tags, + servers=self.servers, + ) + return self.openapi_schema + + def setup(self) -> None: + if self.openapi_url: + urls = (server_data.get("url") for server_data in self.servers) + server_urls = {url for url in urls if url} + + async def openapi(req: Request) -> JSONResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + if root_path not in server_urls: + if root_path and self.root_path_in_servers: + self.servers.insert(0, {"url": root_path}) + server_urls.add(root_path) + return JSONResponse(self.openapi()) + + self.add_route(self.openapi_url, openapi, include_in_schema=False) + if self.openapi_url and self.docs_url: + + async def swagger_ui_html(req: Request) -> HTMLResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + openapi_url = root_path + self.openapi_url + oauth2_redirect_url = self.swagger_ui_oauth2_redirect_url + if oauth2_redirect_url: + oauth2_redirect_url = root_path + oauth2_redirect_url + return get_swagger_ui_html( + openapi_url=openapi_url, + title=self.title + " - Swagger UI", + oauth2_redirect_url=oauth2_redirect_url, + init_oauth=self.swagger_ui_init_oauth, + swagger_ui_parameters=self.swagger_ui_parameters, + ) + + self.add_route(self.docs_url, swagger_ui_html, include_in_schema=False) + + if self.swagger_ui_oauth2_redirect_url: + + async def swagger_ui_redirect(req: Request) -> HTMLResponse: + return get_swagger_ui_oauth2_redirect_html() + + self.add_route( + self.swagger_ui_oauth2_redirect_url, + swagger_ui_redirect, + include_in_schema=False, + ) + if self.openapi_url and self.redoc_url: + + async def redoc_html(req: Request) -> HTMLResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + openapi_url = root_path + self.openapi_url + return get_redoc_html( + openapi_url=openapi_url, title=self.title + " - ReDoc" + ) + + self.add_route(self.redoc_url, redoc_html, include_in_schema=False) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if self.root_path: + scope["root_path"] = self.root_path + await super().__call__(scope, receive, send) + + def add_api_route( + self, + path: str, + endpoint: Callable[..., Coroutine[Any, Any, Response]], + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[List[str]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = 
True, + response_class: Union[Type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + name: Optional[str] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> None: + self.router.add_api_route( + path, + endpoint=endpoint, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def api_route( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[List[str]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.router.add_api_route( + path, + func, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + return func + + return decorator + + def add_api_websocket_route( + self, path: str, endpoint: Callable[..., Any], name: Optional[str] = None + ) -> None: + self.router.add_api_websocket_route(path, endpoint, name=name) + + def websocket( + self, path: str, name: Optional[str] = None + ) 
-> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_api_websocket_route(path, func, name=name) + return func + + return decorator + + def include_router( + self, + router: routing.APIRouter, + *, + prefix: str = "", + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + default_response_class: Type[Response] = Default(JSONResponse), + callbacks: Optional[List[BaseRoute]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> None: + self.router.include_router( + router, + prefix=prefix, + tags=tags, + dependencies=dependencies, + responses=responses, + deprecated=deprecated, + include_in_schema=include_in_schema, + default_response_class=default_response_class, + callbacks=callbacks, + generate_unique_id_function=generate_unique_id_function, + ) + + def get( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.get( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def put( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: 
Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.put( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def post( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.post( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + 
callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def delete( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.delete( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + operation_id=operation_id, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def options( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.options( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + 
response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def head( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.head( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def patch( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = 
True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.patch( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def trace( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.trace( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) diff --git a/venv/lib/python3.10/site-packages/fastapi/background.py b/venv/lib/python3.10/site-packages/fastapi/background.py new file mode 100644 index 0000000..dd3bbe2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/background.py @@ -0,0 +1 @@ +from starlette.background import BackgroundTasks as 
BackgroundTasks # noqa diff --git a/venv/lib/python3.10/site-packages/fastapi/concurrency.py b/venv/lib/python3.10/site-packages/fastapi/concurrency.py new file mode 100644 index 0000000..04382c6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/concurrency.py @@ -0,0 +1,32 @@ +import sys +from typing import AsyncGenerator, ContextManager, TypeVar + +from starlette.concurrency import iterate_in_threadpool as iterate_in_threadpool # noqa +from starlette.concurrency import run_in_threadpool as run_in_threadpool # noqa +from starlette.concurrency import ( # noqa + run_until_first_complete as run_until_first_complete, +) + +if sys.version_info >= (3, 7): + from contextlib import AsyncExitStack as AsyncExitStack + from contextlib import asynccontextmanager as asynccontextmanager +else: + from contextlib2 import AsyncExitStack as AsyncExitStack # noqa + from contextlib2 import asynccontextmanager as asynccontextmanager # noqa + + +_T = TypeVar("_T") + + +@asynccontextmanager +async def contextmanager_in_threadpool( + cm: ContextManager[_T], +) -> AsyncGenerator[_T, None]: + try: + yield await run_in_threadpool(cm.__enter__) + except Exception as e: + ok = await run_in_threadpool(cm.__exit__, type(e), e, None) + if not ok: + raise e + else: + await run_in_threadpool(cm.__exit__, None, None, None) diff --git a/venv/lib/python3.10/site-packages/fastapi/datastructures.py b/venv/lib/python3.10/site-packages/fastapi/datastructures.py new file mode 100644 index 0000000..b20a25a --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/datastructures.py @@ -0,0 +1,56 @@ +from typing import Any, Callable, Dict, Iterable, Type, TypeVar + +from starlette.datastructures import URL as URL # noqa: F401 +from starlette.datastructures import Address as Address # noqa: F401 +from starlette.datastructures import FormData as FormData # noqa: F401 +from starlette.datastructures import Headers as Headers # noqa: F401 +from starlette.datastructures import QueryParams as QueryParams # noqa: F401 +from starlette.datastructures import State as State # noqa: F401 +from starlette.datastructures import UploadFile as StarletteUploadFile + + +class UploadFile(StarletteUploadFile): + @classmethod + def __get_validators__(cls: Type["UploadFile"]) -> Iterable[Callable[..., Any]]: + yield cls.validate + + @classmethod + def validate(cls: Type["UploadFile"], v: Any) -> Any: + if not isinstance(v, StarletteUploadFile): + raise ValueError(f"Expected UploadFile, received: {type(v)}") + return v + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update({"type": "string", "format": "binary"}) + + +class DefaultPlaceholder: + """ + You shouldn't use this class directly. + + It's used internally to recognize when a default value has been overwritten, even + if the overridden default value was truthy. + """ + + def __init__(self, value: Any): + self.value = value + + def __bool__(self) -> bool: + return bool(self.value) + + def __eq__(self, o: object) -> bool: + return isinstance(o, DefaultPlaceholder) and o.value == self.value + + +DefaultType = TypeVar("DefaultType") + + +def Default(value: DefaultType) -> DefaultType: + """ + You shouldn't use this function directly. + + It's used internally to recognize when a default value has been overwritten, even + if the overridden default value was truthy. 
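
# --- Editor's sketch (not part of the vendored file): what DefaultPlaceholder and
# --- Default() above are for. A standalone copy of the two helpers is used so the
# --- snippet runs on its own; resolve() is a hypothetical consumer, but the
# --- isinstance check mirrors how the library recognizes an untouched default.

from typing import Any, TypeVar

class _DefaultPlaceholder:
    def __init__(self, value: Any) -> None:
        self.value = value

    def __bool__(self) -> bool:
        return bool(self.value)

_T = TypeVar("_T")

def _default(value: _T) -> _T:
    # Deliberately lies to the type checker, exactly like Default() above: callers
    # see the plain type, while the runtime value is a recognizable wrapper.
    return _DefaultPlaceholder(value)  # type: ignore

def resolve(param: Any) -> str:
    if isinstance(param, _DefaultPlaceholder):
        return f"default kept -> {param.value!r}"
    return f"explicitly overridden -> {param!r}"

print(resolve(_default("JSONResponse")))  # default kept -> 'JSONResponse'
print(resolve("JSONResponse"))            # explicitly overridden -> 'JSONResponse'
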
+ """ + return DefaultPlaceholder(value) # type: ignore diff --git a/venv/lib/python3.10/site-packages/fastapi/dependencies/__init__.py b/venv/lib/python3.10/site-packages/fastapi/dependencies/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/fastapi/dependencies/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/dependencies/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..255f648 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/dependencies/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/dependencies/__pycache__/models.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/dependencies/__pycache__/models.cpython-310.pyc new file mode 100644 index 0000000..3e981de Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/dependencies/__pycache__/models.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/dependencies/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/dependencies/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000..df9dfc0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/dependencies/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/dependencies/models.py b/venv/lib/python3.10/site-packages/fastapi/dependencies/models.py new file mode 100644 index 0000000..443590b --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/dependencies/models.py @@ -0,0 +1,58 @@ +from typing import Any, Callable, List, Optional, Sequence + +from fastapi.security.base import SecurityBase +from pydantic.fields import ModelField + + +class SecurityRequirement: + def __init__( + self, security_scheme: SecurityBase, scopes: Optional[Sequence[str]] = None + ): + self.security_scheme = security_scheme + self.scopes = scopes + + +class Dependant: + def __init__( + self, + *, + path_params: Optional[List[ModelField]] = None, + query_params: Optional[List[ModelField]] = None, + header_params: Optional[List[ModelField]] = None, + cookie_params: Optional[List[ModelField]] = None, + body_params: Optional[List[ModelField]] = None, + dependencies: Optional[List["Dependant"]] = None, + security_schemes: Optional[List[SecurityRequirement]] = None, + name: Optional[str] = None, + call: Optional[Callable[..., Any]] = None, + request_param_name: Optional[str] = None, + websocket_param_name: Optional[str] = None, + http_connection_param_name: Optional[str] = None, + response_param_name: Optional[str] = None, + background_tasks_param_name: Optional[str] = None, + security_scopes_param_name: Optional[str] = None, + security_scopes: Optional[List[str]] = None, + use_cache: bool = True, + path: Optional[str] = None, + ) -> None: + self.path_params = path_params or [] + self.query_params = query_params or [] + self.header_params = header_params or [] + self.cookie_params = cookie_params or [] + self.body_params = body_params or [] + self.dependencies = dependencies or [] + self.security_requirements = security_schemes or [] + self.request_param_name = request_param_name + self.websocket_param_name = websocket_param_name + self.http_connection_param_name = http_connection_param_name + self.response_param_name = response_param_name + self.background_tasks_param_name = background_tasks_param_name + self.security_scopes = security_scopes + self.security_scopes_param_name = 
security_scopes_param_name + self.name = name + self.call = call + self.use_cache = use_cache + # Store the path to be able to re-generate a dependable from it in overrides + self.path = path + # Save the cache key at creation to optimize performance + self.cache_key = (self.call, tuple(sorted(set(self.security_scopes or [])))) diff --git a/venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py b/venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py new file mode 100644 index 0000000..d4028d0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py @@ -0,0 +1,751 @@ +import dataclasses +import inspect +from contextlib import contextmanager +from copy import deepcopy +from typing import ( + Any, + Callable, + Coroutine, + Dict, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import anyio +from fastapi import params +from fastapi.concurrency import ( + AsyncExitStack, + asynccontextmanager, + contextmanager_in_threadpool, +) +from fastapi.dependencies.models import Dependant, SecurityRequirement +from fastapi.logger import logger +from fastapi.security.base import SecurityBase +from fastapi.security.oauth2 import OAuth2, SecurityScopes +from fastapi.security.open_id_connect_url import OpenIdConnect +from fastapi.utils import create_response_field, get_path_param_names +from pydantic import BaseModel, create_model +from pydantic.error_wrappers import ErrorWrapper +from pydantic.errors import MissingError +from pydantic.fields import ( + SHAPE_LIST, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_SINGLETON, + SHAPE_TUPLE, + SHAPE_TUPLE_ELLIPSIS, + FieldInfo, + ModelField, + Required, +) +from pydantic.schema import get_annotation_from_field_info +from pydantic.typing import ForwardRef, evaluate_forwardref +from pydantic.utils import lenient_issubclass +from starlette.background import BackgroundTasks +from starlette.concurrency import run_in_threadpool +from starlette.datastructures import FormData, Headers, QueryParams, UploadFile +from starlette.requests import HTTPConnection, Request +from starlette.responses import Response +from starlette.websockets import WebSocket + +sequence_shapes = { + SHAPE_LIST, + SHAPE_SET, + SHAPE_TUPLE, + SHAPE_SEQUENCE, + SHAPE_TUPLE_ELLIPSIS, +} +sequence_types = (list, set, tuple) +sequence_shape_to_type = { + SHAPE_LIST: list, + SHAPE_SET: set, + SHAPE_TUPLE: tuple, + SHAPE_SEQUENCE: list, + SHAPE_TUPLE_ELLIPSIS: list, +} + + +multipart_not_installed_error = ( + 'Form data requires "python-multipart" to be installed. \n' + 'You can install "python-multipart" with: \n\n' + "pip install python-multipart\n" +) +multipart_incorrect_install_error = ( + 'Form data requires "python-multipart" to be installed. ' + 'It seems you installed "multipart" instead. 
\n' + 'You can remove "multipart" with: \n\n' + "pip uninstall multipart\n\n" + 'And then install "python-multipart" with: \n\n' + "pip install python-multipart\n" +) + + +def check_file_field(field: ModelField) -> None: + field_info = field.field_info + if isinstance(field_info, params.Form): + try: + # __version__ is available in both multiparts, and can be mocked + from multipart import __version__ # type: ignore + + assert __version__ + try: + # parse_options_header is only available in the right multipart + from multipart.multipart import parse_options_header # type: ignore + + assert parse_options_header + except ImportError: + logger.error(multipart_incorrect_install_error) + raise RuntimeError(multipart_incorrect_install_error) + except ImportError: + logger.error(multipart_not_installed_error) + raise RuntimeError(multipart_not_installed_error) + + +def get_param_sub_dependant( + *, param: inspect.Parameter, path: str, security_scopes: Optional[List[str]] = None +) -> Dependant: + depends: params.Depends = param.default + if depends.dependency: + dependency = depends.dependency + else: + dependency = param.annotation + return get_sub_dependant( + depends=depends, + dependency=dependency, + path=path, + name=param.name, + security_scopes=security_scopes, + ) + + +def get_parameterless_sub_dependant(*, depends: params.Depends, path: str) -> Dependant: + assert callable( + depends.dependency + ), "A parameter-less dependency must have a callable dependency" + return get_sub_dependant(depends=depends, dependency=depends.dependency, path=path) + + +def get_sub_dependant( + *, + depends: params.Depends, + dependency: Callable[..., Any], + path: str, + name: Optional[str] = None, + security_scopes: Optional[List[str]] = None, +) -> Dependant: + security_requirement = None + security_scopes = security_scopes or [] + if isinstance(depends, params.Security): + dependency_scopes = depends.scopes + security_scopes.extend(dependency_scopes) + if isinstance(dependency, SecurityBase): + use_scopes: List[str] = [] + if isinstance(dependency, (OAuth2, OpenIdConnect)): + use_scopes = security_scopes + security_requirement = SecurityRequirement( + security_scheme=dependency, scopes=use_scopes + ) + sub_dependant = get_dependant( + path=path, + call=dependency, + name=name, + security_scopes=security_scopes, + use_cache=depends.use_cache, + ) + if security_requirement: + sub_dependant.security_requirements.append(security_requirement) + sub_dependant.security_scopes = security_scopes + return sub_dependant + + +CacheKey = Tuple[Optional[Callable[..., Any]], Tuple[str, ...]] + + +def get_flat_dependant( + dependant: Dependant, + *, + skip_repeats: bool = False, + visited: Optional[List[CacheKey]] = None, +) -> Dependant: + if visited is None: + visited = [] + visited.append(dependant.cache_key) + + flat_dependant = Dependant( + path_params=dependant.path_params.copy(), + query_params=dependant.query_params.copy(), + header_params=dependant.header_params.copy(), + cookie_params=dependant.cookie_params.copy(), + body_params=dependant.body_params.copy(), + security_schemes=dependant.security_requirements.copy(), + use_cache=dependant.use_cache, + path=dependant.path, + ) + for sub_dependant in dependant.dependencies: + if skip_repeats and sub_dependant.cache_key in visited: + continue + flat_sub = get_flat_dependant( + sub_dependant, skip_repeats=skip_repeats, visited=visited + ) + flat_dependant.path_params.extend(flat_sub.path_params) + flat_dependant.query_params.extend(flat_sub.query_params) + 
flat_dependant.header_params.extend(flat_sub.header_params) + flat_dependant.cookie_params.extend(flat_sub.cookie_params) + flat_dependant.body_params.extend(flat_sub.body_params) + flat_dependant.security_requirements.extend(flat_sub.security_requirements) + return flat_dependant + + +def get_flat_params(dependant: Dependant) -> List[ModelField]: + flat_dependant = get_flat_dependant(dependant, skip_repeats=True) + return ( + flat_dependant.path_params + + flat_dependant.query_params + + flat_dependant.header_params + + flat_dependant.cookie_params + ) + + +def is_scalar_field(field: ModelField) -> bool: + field_info = field.field_info + if not ( + field.shape == SHAPE_SINGLETON + and not lenient_issubclass(field.type_, BaseModel) + and not lenient_issubclass(field.type_, sequence_types + (dict,)) + and not dataclasses.is_dataclass(field.type_) + and not isinstance(field_info, params.Body) + ): + return False + if field.sub_fields: + if not all(is_scalar_field(f) for f in field.sub_fields): + return False + return True + + +def is_scalar_sequence_field(field: ModelField) -> bool: + if (field.shape in sequence_shapes) and not lenient_issubclass( + field.type_, BaseModel + ): + if field.sub_fields is not None: + for sub_field in field.sub_fields: + if not is_scalar_field(sub_field): + return False + return True + if lenient_issubclass(field.type_, sequence_types): + return True + return False + + +def get_typed_signature(call: Callable[..., Any]) -> inspect.Signature: + signature = inspect.signature(call) + globalns = getattr(call, "__globals__", {}) + typed_params = [ + inspect.Parameter( + name=param.name, + kind=param.kind, + default=param.default, + annotation=get_typed_annotation(param, globalns), + ) + for param in signature.parameters.values() + ] + typed_signature = inspect.Signature(typed_params) + return typed_signature + + +def get_typed_annotation(param: inspect.Parameter, globalns: Dict[str, Any]) -> Any: + annotation = param.annotation + if isinstance(annotation, str): + annotation = ForwardRef(annotation) + annotation = evaluate_forwardref(annotation, globalns, globalns) + return annotation + + +def get_dependant( + *, + path: str, + call: Callable[..., Any], + name: Optional[str] = None, + security_scopes: Optional[List[str]] = None, + use_cache: bool = True, +) -> Dependant: + path_param_names = get_path_param_names(path) + endpoint_signature = get_typed_signature(call) + signature_params = endpoint_signature.parameters + dependant = Dependant(call=call, name=name, path=path, use_cache=use_cache) + for param_name, param in signature_params.items(): + if isinstance(param.default, params.Depends): + sub_dependant = get_param_sub_dependant( + param=param, path=path, security_scopes=security_scopes + ) + dependant.dependencies.append(sub_dependant) + continue + if add_non_field_param_to_dependency(param=param, dependant=dependant): + continue + param_field = get_param_field( + param=param, default_field_info=params.Query, param_name=param_name + ) + if param_name in path_param_names: + assert is_scalar_field( + field=param_field + ), "Path params must be of one of the supported types" + if isinstance(param.default, params.Path): + ignore_default = False + else: + ignore_default = True + param_field = get_param_field( + param=param, + param_name=param_name, + default_field_info=params.Path, + force_type=params.ParamTypes.path, + ignore_default=ignore_default, + ) + add_param_to_fields(field=param_field, dependant=dependant) + elif is_scalar_field(field=param_field): + 
add_param_to_fields(field=param_field, dependant=dependant) + elif isinstance( + param.default, (params.Query, params.Header) + ) and is_scalar_sequence_field(param_field): + add_param_to_fields(field=param_field, dependant=dependant) + else: + field_info = param_field.field_info + assert isinstance( + field_info, params.Body + ), f"Param: {param_field.name} can only be a request body, using Body(...)" + dependant.body_params.append(param_field) + return dependant + + +def add_non_field_param_to_dependency( + *, param: inspect.Parameter, dependant: Dependant +) -> Optional[bool]: + if lenient_issubclass(param.annotation, Request): + dependant.request_param_name = param.name + return True + elif lenient_issubclass(param.annotation, WebSocket): + dependant.websocket_param_name = param.name + return True + elif lenient_issubclass(param.annotation, HTTPConnection): + dependant.http_connection_param_name = param.name + return True + elif lenient_issubclass(param.annotation, Response): + dependant.response_param_name = param.name + return True + elif lenient_issubclass(param.annotation, BackgroundTasks): + dependant.background_tasks_param_name = param.name + return True + elif lenient_issubclass(param.annotation, SecurityScopes): + dependant.security_scopes_param_name = param.name + return True + return None + + +def get_param_field( + *, + param: inspect.Parameter, + param_name: str, + default_field_info: Type[params.Param] = params.Param, + force_type: Optional[params.ParamTypes] = None, + ignore_default: bool = False, +) -> ModelField: + default_value = Required + had_schema = False + if not param.default == param.empty and ignore_default is False: + default_value = param.default + if isinstance(default_value, FieldInfo): + had_schema = True + field_info = default_value + default_value = field_info.default + if ( + isinstance(field_info, params.Param) + and getattr(field_info, "in_", None) is None + ): + field_info.in_ = default_field_info.in_ + if force_type: + field_info.in_ = force_type # type: ignore + else: + field_info = default_field_info(default_value) + required = default_value == Required + annotation: Any = Any + if not param.annotation == param.empty: + annotation = param.annotation + annotation = get_annotation_from_field_info(annotation, field_info, param_name) + if not field_info.alias and getattr(field_info, "convert_underscores", None): + alias = param.name.replace("_", "-") + else: + alias = field_info.alias or param.name + field = create_response_field( + name=param.name, + type_=annotation, + default=None if required else default_value, + alias=alias, + required=required, + field_info=field_info, + ) + field.required = required + if not had_schema and not is_scalar_field(field=field): + field.field_info = params.Body(field_info.default) + if not had_schema and lenient_issubclass(field.type_, UploadFile): + field.field_info = params.File(field_info.default) + + return field + + +def add_param_to_fields(*, field: ModelField, dependant: Dependant) -> None: + field_info = cast(params.Param, field.field_info) + if field_info.in_ == params.ParamTypes.path: + dependant.path_params.append(field) + elif field_info.in_ == params.ParamTypes.query: + dependant.query_params.append(field) + elif field_info.in_ == params.ParamTypes.header: + dependant.header_params.append(field) + else: + assert ( + field_info.in_ == params.ParamTypes.cookie + ), f"non-body parameters must be in path, query, header or cookie: {field.name}" + dependant.cookie_params.append(field) + + +def 
is_coroutine_callable(call: Callable[..., Any]) -> bool: + if inspect.isroutine(call): + return inspect.iscoroutinefunction(call) + if inspect.isclass(call): + return False + call = getattr(call, "__call__", None) + return inspect.iscoroutinefunction(call) + + +def is_async_gen_callable(call: Callable[..., Any]) -> bool: + if inspect.isasyncgenfunction(call): + return True + call = getattr(call, "__call__", None) + return inspect.isasyncgenfunction(call) + + +def is_gen_callable(call: Callable[..., Any]) -> bool: + if inspect.isgeneratorfunction(call): + return True + call = getattr(call, "__call__", None) + return inspect.isgeneratorfunction(call) + + +async def solve_generator( + *, call: Callable[..., Any], stack: AsyncExitStack, sub_values: Dict[str, Any] +) -> Any: + if is_gen_callable(call): + cm = contextmanager_in_threadpool(contextmanager(call)(**sub_values)) + elif is_async_gen_callable(call): + cm = asynccontextmanager(call)(**sub_values) + return await stack.enter_async_context(cm) + + +async def solve_dependencies( + *, + request: Union[Request, WebSocket], + dependant: Dependant, + body: Optional[Union[Dict[str, Any], FormData]] = None, + background_tasks: Optional[BackgroundTasks] = None, + response: Optional[Response] = None, + dependency_overrides_provider: Optional[Any] = None, + dependency_cache: Optional[Dict[Tuple[Callable[..., Any], Tuple[str]], Any]] = None, +) -> Tuple[ + Dict[str, Any], + List[ErrorWrapper], + Optional[BackgroundTasks], + Response, + Dict[Tuple[Callable[..., Any], Tuple[str]], Any], +]: + values: Dict[str, Any] = {} + errors: List[ErrorWrapper] = [] + response = response or Response( + content=None, + status_code=None, # type: ignore + headers=None, # type: ignore # in Starlette + media_type=None, # type: ignore # in Starlette + background=None, # type: ignore # in Starlette + ) + dependency_cache = dependency_cache or {} + sub_dependant: Dependant + for sub_dependant in dependant.dependencies: + sub_dependant.call = cast(Callable[..., Any], sub_dependant.call) + sub_dependant.cache_key = cast( + Tuple[Callable[..., Any], Tuple[str]], sub_dependant.cache_key + ) + call = sub_dependant.call + use_sub_dependant = sub_dependant + if ( + dependency_overrides_provider + and dependency_overrides_provider.dependency_overrides + ): + original_call = sub_dependant.call + call = getattr( + dependency_overrides_provider, "dependency_overrides", {} + ).get(original_call, original_call) + use_path: str = sub_dependant.path # type: ignore + use_sub_dependant = get_dependant( + path=use_path, + call=call, + name=sub_dependant.name, + security_scopes=sub_dependant.security_scopes, + ) + use_sub_dependant.security_scopes = sub_dependant.security_scopes + + solved_result = await solve_dependencies( + request=request, + dependant=use_sub_dependant, + body=body, + background_tasks=background_tasks, + response=response, + dependency_overrides_provider=dependency_overrides_provider, + dependency_cache=dependency_cache, + ) + ( + sub_values, + sub_errors, + background_tasks, + _, # the subdependency returns the same response we have + sub_dependency_cache, + ) = solved_result + dependency_cache.update(sub_dependency_cache) + if sub_errors: + errors.extend(sub_errors) + continue + if sub_dependant.use_cache and sub_dependant.cache_key in dependency_cache: + solved = dependency_cache[sub_dependant.cache_key] + elif is_gen_callable(call) or is_async_gen_callable(call): + stack = request.scope.get("fastapi_astack") + assert isinstance(stack, AsyncExitStack) + solved = await 
solve_generator( + call=call, stack=stack, sub_values=sub_values + ) + elif is_coroutine_callable(call): + solved = await call(**sub_values) + else: + solved = await run_in_threadpool(call, **sub_values) + if sub_dependant.name is not None: + values[sub_dependant.name] = solved + if sub_dependant.cache_key not in dependency_cache: + dependency_cache[sub_dependant.cache_key] = solved + path_values, path_errors = request_params_to_args( + dependant.path_params, request.path_params + ) + query_values, query_errors = request_params_to_args( + dependant.query_params, request.query_params + ) + header_values, header_errors = request_params_to_args( + dependant.header_params, request.headers + ) + cookie_values, cookie_errors = request_params_to_args( + dependant.cookie_params, request.cookies + ) + values.update(path_values) + values.update(query_values) + values.update(header_values) + values.update(cookie_values) + errors += path_errors + query_errors + header_errors + cookie_errors + if dependant.body_params: + ( + body_values, + body_errors, + ) = await request_body_to_args( # body_params checked above + required_params=dependant.body_params, received_body=body + ) + values.update(body_values) + errors.extend(body_errors) + if dependant.http_connection_param_name: + values[dependant.http_connection_param_name] = request + if dependant.request_param_name and isinstance(request, Request): + values[dependant.request_param_name] = request + elif dependant.websocket_param_name and isinstance(request, WebSocket): + values[dependant.websocket_param_name] = request + if dependant.background_tasks_param_name: + if background_tasks is None: + background_tasks = BackgroundTasks() + values[dependant.background_tasks_param_name] = background_tasks + if dependant.response_param_name: + values[dependant.response_param_name] = response + if dependant.security_scopes_param_name: + values[dependant.security_scopes_param_name] = SecurityScopes( + scopes=dependant.security_scopes + ) + return values, errors, background_tasks, response, dependency_cache + + +def request_params_to_args( + required_params: Sequence[ModelField], + received_params: Union[Mapping[str, Any], QueryParams, Headers], +) -> Tuple[Dict[str, Any], List[ErrorWrapper]]: + values = {} + errors = [] + for field in required_params: + if is_scalar_sequence_field(field) and isinstance( + received_params, (QueryParams, Headers) + ): + value = received_params.getlist(field.alias) or field.default + else: + value = received_params.get(field.alias) + field_info = field.field_info + assert isinstance( + field_info, params.Param + ), "Params must be subclasses of Param" + if value is None: + if field.required: + errors.append( + ErrorWrapper( + MissingError(), loc=(field_info.in_.value, field.alias) + ) + ) + else: + values[field.name] = deepcopy(field.default) + continue + v_, errors_ = field.validate( + value, values, loc=(field_info.in_.value, field.alias) + ) + if isinstance(errors_, ErrorWrapper): + errors.append(errors_) + elif isinstance(errors_, list): + errors.extend(errors_) + else: + values[field.name] = v_ + return values, errors + + +async def request_body_to_args( + required_params: List[ModelField], + received_body: Optional[Union[Dict[str, Any], FormData]], +) -> Tuple[Dict[str, Any], List[ErrorWrapper]]: + values = {} + errors = [] + if required_params: + field = required_params[0] + field_info = field.field_info + embed = getattr(field_info, "embed", None) + field_alias_omitted = len(required_params) == 1 and not embed + if 
field_alias_omitted: + received_body = {field.alias: received_body} + + for field in required_params: + loc: Tuple[str, ...] + if field_alias_omitted: + loc = ("body",) + else: + loc = ("body", field.alias) + + value: Optional[Any] = None + if received_body is not None: + if ( + field.shape in sequence_shapes or field.type_ in sequence_types + ) and isinstance(received_body, FormData): + value = received_body.getlist(field.alias) + else: + try: + value = received_body.get(field.alias) + except AttributeError: + errors.append(get_missing_field_error(loc)) + continue + if ( + value is None + or (isinstance(field_info, params.Form) and value == "") + or ( + isinstance(field_info, params.Form) + and field.shape in sequence_shapes + and len(value) == 0 + ) + ): + if field.required: + errors.append(get_missing_field_error(loc)) + else: + values[field.name] = deepcopy(field.default) + continue + if ( + isinstance(field_info, params.File) + and lenient_issubclass(field.type_, bytes) + and isinstance(value, UploadFile) + ): + value = await value.read() + elif ( + field.shape in sequence_shapes + and isinstance(field_info, params.File) + and lenient_issubclass(field.type_, bytes) + and isinstance(value, sequence_types) + ): + results: List[Union[bytes, str]] = [] + + async def process_fn( + fn: Callable[[], Coroutine[Any, Any, Any]] + ) -> None: + result = await fn() + results.append(result) + + async with anyio.create_task_group() as tg: + for sub_value in value: + tg.start_soon(process_fn, sub_value.read) + value = sequence_shape_to_type[field.shape](results) + + v_, errors_ = field.validate(value, values, loc=loc) + + if isinstance(errors_, ErrorWrapper): + errors.append(errors_) + elif isinstance(errors_, list): + errors.extend(errors_) + else: + values[field.name] = v_ + return values, errors + + +def get_missing_field_error(loc: Tuple[str, ...]) -> ErrorWrapper: + missing_field_error = ErrorWrapper(MissingError(), loc=loc) + return missing_field_error + + +def get_body_field(*, dependant: Dependant, name: str) -> Optional[ModelField]: + flat_dependant = get_flat_dependant(dependant) + if not flat_dependant.body_params: + return None + first_param = flat_dependant.body_params[0] + field_info = first_param.field_info + embed = getattr(field_info, "embed", None) + body_param_names_set = {param.name for param in flat_dependant.body_params} + if len(body_param_names_set) == 1 and not embed: + check_file_field(first_param) + return first_param + # If one field requires to embed, all have to be embedded + # in case a sub-dependency is evaluated with a single unique body field + # That is combined (embedded) with other body fields + for param in flat_dependant.body_params: + setattr(param.field_info, "embed", True) + model_name = "Body_" + name + BodyModel: Type[BaseModel] = create_model(model_name) + for f in flat_dependant.body_params: + BodyModel.__fields__[f.name] = f + required = any(True for f in flat_dependant.body_params if f.required) + + BodyFieldInfo_kwargs: Dict[str, Any] = dict(default=None) + if any(isinstance(f.field_info, params.File) for f in flat_dependant.body_params): + BodyFieldInfo: Type[params.Body] = params.File + elif any(isinstance(f.field_info, params.Form) for f in flat_dependant.body_params): + BodyFieldInfo = params.Form + else: + BodyFieldInfo = params.Body + + body_param_media_types = [ + getattr(f.field_info, "media_type") + for f in flat_dependant.body_params + if isinstance(f.field_info, params.Body) + ] + if len(set(body_param_media_types)) == 1: + 
BodyFieldInfo_kwargs["media_type"] = body_param_media_types[0] + final_field = create_response_field( + name="body", + type_=BodyModel, + required=required, + alias="body", + field_info=BodyFieldInfo(**BodyFieldInfo_kwargs), + ) + check_file_field(final_field) + return final_field diff --git a/venv/lib/python3.10/site-packages/fastapi/encoders.py b/venv/lib/python3.10/site-packages/fastapi/encoders.py new file mode 100644 index 0000000..4b7ffe3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/encoders.py @@ -0,0 +1,153 @@ +import dataclasses +from collections import defaultdict +from enum import Enum +from pathlib import PurePath +from types import GeneratorType +from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union + +from pydantic import BaseModel +from pydantic.json import ENCODERS_BY_TYPE + +SetIntStr = Set[Union[int, str]] +DictIntStrAny = Dict[Union[int, str], Any] + + +def generate_encoders_by_class_tuples( + type_encoder_map: Dict[Any, Callable[[Any], Any]] +) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]: + encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict( + tuple + ) + for type_, encoder in type_encoder_map.items(): + encoders_by_class_tuples[encoder] += (type_,) + return encoders_by_class_tuples + + +encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE) + + +def jsonable_encoder( + obj: Any, + include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + by_alias: bool = True, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None, + sqlalchemy_safe: bool = True, +) -> Any: + custom_encoder = custom_encoder or {} + if custom_encoder: + if type(obj) in custom_encoder: + return custom_encoder[type(obj)](obj) + else: + for encoder_type, encoder_instance in custom_encoder.items(): + if isinstance(obj, encoder_type): + return encoder_instance(obj) + if include is not None and not isinstance(include, (set, dict)): + include = set(include) + if exclude is not None and not isinstance(exclude, (set, dict)): + exclude = set(exclude) + if isinstance(obj, BaseModel): + encoder = getattr(obj.__config__, "json_encoders", {}) + if custom_encoder: + encoder.update(custom_encoder) + obj_dict = obj.dict( + include=include, # type: ignore # in Pydantic + exclude=exclude, # type: ignore # in Pydantic + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + exclude_defaults=exclude_defaults, + ) + if "__root__" in obj_dict: + obj_dict = obj_dict["__root__"] + return jsonable_encoder( + obj_dict, + exclude_none=exclude_none, + exclude_defaults=exclude_defaults, + custom_encoder=encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + if dataclasses.is_dataclass(obj): + return dataclasses.asdict(obj) + if isinstance(obj, Enum): + return obj.value + if isinstance(obj, PurePath): + return str(obj) + if isinstance(obj, (str, int, float, type(None))): + return obj + if isinstance(obj, dict): + encoded_dict = {} + for key, value in obj.items(): + if ( + ( + not sqlalchemy_safe + or (not isinstance(key, str)) + or (not key.startswith("_sa")) + ) + and (value is not None or not exclude_none) + and ((include and key in include) or not exclude or key not in exclude) + ): + encoded_key = jsonable_encoder( + key, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + 
sqlalchemy_safe=sqlalchemy_safe, + ) + encoded_value = jsonable_encoder( + value, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + encoded_dict[encoded_key] = encoded_value + return encoded_dict + if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)): + encoded_list = [] + for item in obj: + encoded_list.append( + jsonable_encoder( + item, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + ) + return encoded_list + + if type(obj) in ENCODERS_BY_TYPE: + return ENCODERS_BY_TYPE[type(obj)](obj) + for encoder, classes_tuple in encoders_by_class_tuples.items(): + if isinstance(obj, classes_tuple): + return encoder(obj) + + errors: List[Exception] = [] + try: + data = dict(obj) + except Exception as e: + errors.append(e) + try: + data = vars(obj) + except Exception as e: + errors.append(e) + raise ValueError(errors) + return jsonable_encoder( + data, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) diff --git a/venv/lib/python3.10/site-packages/fastapi/exception_handlers.py b/venv/lib/python3.10/site-packages/fastapi/exception_handlers.py new file mode 100644 index 0000000..2b286d7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/exception_handlers.py @@ -0,0 +1,25 @@ +from fastapi.encoders import jsonable_encoder +from fastapi.exceptions import RequestValidationError +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.responses import JSONResponse +from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY + + +async def http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse: + headers = getattr(exc, "headers", None) + if headers: + return JSONResponse( + {"detail": exc.detail}, status_code=exc.status_code, headers=headers + ) + else: + return JSONResponse({"detail": exc.detail}, status_code=exc.status_code) + + +async def request_validation_exception_handler( + request: Request, exc: RequestValidationError +) -> JSONResponse: + return JSONResponse( + status_code=HTTP_422_UNPROCESSABLE_ENTITY, + content={"detail": jsonable_encoder(exc.errors())}, + ) diff --git a/venv/lib/python3.10/site-packages/fastapi/exceptions.py b/venv/lib/python3.10/site-packages/fastapi/exceptions.py new file mode 100644 index 0000000..f4a837b --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/exceptions.py @@ -0,0 +1,37 @@ +from typing import Any, Dict, Optional, Sequence, Type + +from pydantic import BaseModel, ValidationError, create_model +from pydantic.error_wrappers import ErrorList +from starlette.exceptions import HTTPException as StarletteHTTPException + + +class HTTPException(StarletteHTTPException): + def __init__( + self, + status_code: int, + detail: Any = None, + headers: Optional[Dict[str, Any]] = None, + ) -> None: + super().__init__(status_code=status_code, detail=detail) + self.headers = headers + + +RequestErrorModel: Type[BaseModel] = create_model("Request") +WebSocketErrorModel: Type[BaseModel] = create_model("WebSocket") + + +class FastAPIError(RuntimeError): + """ + A generic, FastAPI-specific error. 
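#   Sketch (hypothetical app) of how these default handlers are overridden
#   in user code; a handler registered for the same exception class replaces
#   the request_validation_exception_handler above:
#
#       @app.exception_handler(RequestValidationError)
#       async def on_validation_error(request: Request,
#                                     exc: RequestValidationError):
#           return JSONResponse(status_code=400,
#                               content={"detail": jsonable_encoder(exc.errors())})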
+ """ + + +class RequestValidationError(ValidationError): + def __init__(self, errors: Sequence[ErrorList], *, body: Any = None) -> None: + self.body = body + super().__init__(errors, RequestErrorModel) + + +class WebSocketRequestValidationError(ValidationError): + def __init__(self, errors: Sequence[ErrorList]) -> None: + super().__init__(errors, WebSocketErrorModel) diff --git a/venv/lib/python3.10/site-packages/fastapi/logger.py b/venv/lib/python3.10/site-packages/fastapi/logger.py new file mode 100644 index 0000000..5b2c4ad --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/logger.py @@ -0,0 +1,3 @@ +import logging + +logger = logging.getLogger("fastapi") diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/__init__.py b/venv/lib/python3.10/site-packages/fastapi/middleware/__init__.py new file mode 100644 index 0000000..620296d --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/middleware/__init__.py @@ -0,0 +1 @@ +from starlette.middleware import Middleware as Middleware diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..97cc8ce Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/asyncexitstack.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/asyncexitstack.cpython-310.pyc new file mode 100644 index 0000000..c37c04c Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/asyncexitstack.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/cors.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/cors.cpython-310.pyc new file mode 100644 index 0000000..ea31f87 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/cors.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/gzip.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/gzip.cpython-310.pyc new file mode 100644 index 0000000..5904a81 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/gzip.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/httpsredirect.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/httpsredirect.cpython-310.pyc new file mode 100644 index 0000000..2182245 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/httpsredirect.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/trustedhost.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/trustedhost.cpython-310.pyc new file mode 100644 index 0000000..c3e5d43 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/trustedhost.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/wsgi.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/wsgi.cpython-310.pyc new file mode 100644 index 0000000..84b8526 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/fastapi/middleware/__pycache__/wsgi.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py b/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py new file mode 100644 index 0000000..503a68a --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/middleware/asyncexitstack.py @@ -0,0 +1,28 @@ +from typing import Optional + +from fastapi.concurrency import AsyncExitStack +from starlette.types import ASGIApp, Receive, Scope, Send + + +class AsyncExitStackMiddleware: + def __init__(self, app: ASGIApp, context_name: str = "fastapi_astack") -> None: + self.app = app + self.context_name = context_name + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if AsyncExitStack: + dependency_exception: Optional[Exception] = None + async with AsyncExitStack() as stack: + scope[self.context_name] = stack + try: + await self.app(scope, receive, send) + except Exception as e: + dependency_exception = e + raise e + if dependency_exception: + # This exception was possibly handled by the dependency but it should + # still bubble up so that the ServerErrorMiddleware can return a 500 + # or the ExceptionMiddleware can catch and handle any other exceptions + raise dependency_exception + else: + await self.app(scope, receive, send) # pragma: no cover diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/cors.py b/venv/lib/python3.10/site-packages/fastapi/middleware/cors.py new file mode 100644 index 0000000..8dfaad0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/middleware/cors.py @@ -0,0 +1 @@ +from starlette.middleware.cors import CORSMiddleware as CORSMiddleware # noqa diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/gzip.py b/venv/lib/python3.10/site-packages/fastapi/middleware/gzip.py new file mode 100644 index 0000000..bbeb2cc --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/middleware/gzip.py @@ -0,0 +1 @@ +from starlette.middleware.gzip import GZipMiddleware as GZipMiddleware # noqa diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/httpsredirect.py b/venv/lib/python3.10/site-packages/fastapi/middleware/httpsredirect.py new file mode 100644 index 0000000..b7a3d8e --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/middleware/httpsredirect.py @@ -0,0 +1,3 @@ +from starlette.middleware.httpsredirect import ( # noqa + HTTPSRedirectMiddleware as HTTPSRedirectMiddleware, +) diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/trustedhost.py b/venv/lib/python3.10/site-packages/fastapi/middleware/trustedhost.py new file mode 100644 index 0000000..08d7e03 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/middleware/trustedhost.py @@ -0,0 +1,3 @@ +from starlette.middleware.trustedhost import ( # noqa + TrustedHostMiddleware as TrustedHostMiddleware, +) diff --git a/venv/lib/python3.10/site-packages/fastapi/middleware/wsgi.py b/venv/lib/python3.10/site-packages/fastapi/middleware/wsgi.py new file mode 100644 index 0000000..c4c6a79 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/middleware/wsgi.py @@ -0,0 +1 @@ +from starlette.middleware.wsgi import WSGIMiddleware as WSGIMiddleware # noqa diff --git a/venv/lib/python3.10/site-packages/fastapi/openapi/__init__.py b/venv/lib/python3.10/site-packages/fastapi/openapi/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..1bac5f9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/constants.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/constants.cpython-310.pyc new file mode 100644 index 0000000..4b3cea3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/constants.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/docs.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/docs.cpython-310.pyc new file mode 100644 index 0000000..aea500b Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/docs.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/models.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/models.cpython-310.pyc new file mode 100644 index 0000000..2841dc3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/models.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000..20c1cc8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/openapi/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/openapi/constants.py b/venv/lib/python3.10/site-packages/fastapi/openapi/constants.py new file mode 100644 index 0000000..3e69e55 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/openapi/constants.py @@ -0,0 +1,3 @@ +METHODS_WITH_BODY = {"GET", "HEAD", "POST", "PUT", "DELETE", "PATCH"} +STATUS_CODES_WITH_NO_BODY = {100, 101, 102, 103, 204, 304} +REF_PREFIX = "#/components/schemas/" diff --git a/venv/lib/python3.10/site-packages/fastapi/openapi/docs.py b/venv/lib/python3.10/site-packages/fastapi/openapi/docs.py new file mode 100644 index 0000000..d6af17a --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/openapi/docs.py @@ -0,0 +1,187 @@ +import json +from typing import Any, Dict, Optional + +from fastapi.encoders import jsonable_encoder +from starlette.responses import HTMLResponse + +swagger_ui_default_parameters = { + "dom_id": "#swagger-ui", + "layout": "BaseLayout", + "deepLinking": True, + "showExtensions": True, + "showCommonExtensions": True, +} + + +def get_swagger_ui_html( + *, + openapi_url: str, + title: str, + swagger_js_url: str = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@4/swagger-ui-bundle.js", + swagger_css_url: str = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@4/swagger-ui.css", + swagger_favicon_url: str = "https://fastapi.tiangolo.com/img/favicon.png", + oauth2_redirect_url: Optional[str] = None, + init_oauth: Optional[Dict[str, Any]] = None, + swagger_ui_parameters: Optional[Dict[str, Any]] = None, +) -> HTMLResponse: + current_swagger_ui_parameters = swagger_ui_default_parameters.copy() + if swagger_ui_parameters: + current_swagger_ui_parameters.update(swagger_ui_parameters) + + html = f""" + + + + + + {title} + + +
[swagger-ui HTML template lines stripped in this dump; only interpolated values such as {title} survive]
    + + + + + + """ + return HTMLResponse(html) + + +def get_redoc_html( + *, + openapi_url: str, + title: str, + redoc_js_url: str = "https://cdn.jsdelivr.net/npm/redoc@next/bundles/redoc.standalone.js", + redoc_favicon_url: str = "https://fastapi.tiangolo.com/img/favicon.png", + with_google_fonts: bool = True, +) -> HTMLResponse: + html = f""" + + + + {title} + + + + """ + if with_google_fonts: + html += """ + + """ + html += f""" + + + + + + + + + + """ + return HTMLResponse(html) + + +def get_swagger_ui_oauth2_redirect_html() -> HTMLResponse: + html = """ + + + + + + + """ + return HTMLResponse(content=html) diff --git a/venv/lib/python3.10/site-packages/fastapi/openapi/models.py b/venv/lib/python3.10/site-packages/fastapi/openapi/models.py new file mode 100644 index 0000000..9c6598d --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/openapi/models.py @@ -0,0 +1,406 @@ +from enum import Enum +from typing import Any, Callable, Dict, Iterable, List, Optional, Union + +from fastapi.logger import logger +from pydantic import AnyUrl, BaseModel, Field + +try: + import email_validator # type: ignore + + assert email_validator # make autoflake ignore the unused import + from pydantic import EmailStr +except ImportError: # pragma: no cover + + class EmailStr(str): # type: ignore + @classmethod + def __get_validators__(cls) -> Iterable[Callable[..., Any]]: + yield cls.validate + + @classmethod + def validate(cls, v: Any) -> str: + logger.warning( + "email-validator not installed, email fields will be treated as str.\n" + "To install, run: pip install email-validator" + ) + return str(v) + + +class Contact(BaseModel): + name: Optional[str] = None + url: Optional[AnyUrl] = None + email: Optional[EmailStr] = None + + class Config: + extra = "allow" + + +class License(BaseModel): + name: str + url: Optional[AnyUrl] = None + + class Config: + extra = "allow" + + +class Info(BaseModel): + title: str + description: Optional[str] = None + termsOfService: Optional[str] = None + contact: Optional[Contact] = None + license: Optional[License] = None + version: str + + class Config: + extra = "allow" + + +class ServerVariable(BaseModel): + enum: Optional[List[str]] = None + default: str + description: Optional[str] = None + + class Config: + extra = "allow" + + +class Server(BaseModel): + url: Union[AnyUrl, str] + description: Optional[str] = None + variables: Optional[Dict[str, ServerVariable]] = None + + class Config: + extra = "allow" + + +class Reference(BaseModel): + ref: str = Field(..., alias="$ref") + + +class Discriminator(BaseModel): + propertyName: str + mapping: Optional[Dict[str, str]] = None + + +class XML(BaseModel): + name: Optional[str] = None + namespace: Optional[str] = None + prefix: Optional[str] = None + attribute: Optional[bool] = None + wrapped: Optional[bool] = None + + class Config: + extra = "allow" + + +class ExternalDocumentation(BaseModel): + description: Optional[str] = None + url: AnyUrl + + class Config: + extra = "allow" + + +class Schema(BaseModel): + ref: Optional[str] = Field(None, alias="$ref") + title: Optional[str] = None + multipleOf: Optional[float] = None + maximum: Optional[float] = None + exclusiveMaximum: Optional[float] = None + minimum: Optional[float] = None + exclusiveMinimum: Optional[float] = None + maxLength: Optional[int] = Field(None, gte=0) + minLength: Optional[int] = Field(None, gte=0) + pattern: Optional[str] = None + maxItems: Optional[int] = Field(None, gte=0) + minItems: Optional[int] = Field(None, gte=0) + uniqueItems: Optional[bool] = 
None + maxProperties: Optional[int] = Field(None, gte=0) + minProperties: Optional[int] = Field(None, gte=0) + required: Optional[List[str]] = None + enum: Optional[List[Any]] = None + type: Optional[str] = None + allOf: Optional[List["Schema"]] = None + oneOf: Optional[List["Schema"]] = None + anyOf: Optional[List["Schema"]] = None + not_: Optional["Schema"] = Field(None, alias="not") + items: Optional[Union["Schema", List["Schema"]]] = None + properties: Optional[Dict[str, "Schema"]] = None + additionalProperties: Optional[Union["Schema", Reference, bool]] = None + description: Optional[str] = None + format: Optional[str] = None + default: Optional[Any] = None + nullable: Optional[bool] = None + discriminator: Optional[Discriminator] = None + readOnly: Optional[bool] = None + writeOnly: Optional[bool] = None + xml: Optional[XML] = None + externalDocs: Optional[ExternalDocumentation] = None + example: Optional[Any] = None + deprecated: Optional[bool] = None + + class Config: + extra: str = "allow" + + +class Example(BaseModel): + summary: Optional[str] = None + description: Optional[str] = None + value: Optional[Any] = None + externalValue: Optional[AnyUrl] = None + + class Config: + extra = "allow" + + +class ParameterInType(Enum): + query = "query" + header = "header" + path = "path" + cookie = "cookie" + + +class Encoding(BaseModel): + contentType: Optional[str] = None + headers: Optional[Dict[str, Union["Header", Reference]]] = None + style: Optional[str] = None + explode: Optional[bool] = None + allowReserved: Optional[bool] = None + + class Config: + extra = "allow" + + +class MediaType(BaseModel): + schema_: Optional[Union[Schema, Reference]] = Field(None, alias="schema") + example: Optional[Any] = None + examples: Optional[Dict[str, Union[Example, Reference]]] = None + encoding: Optional[Dict[str, Encoding]] = None + + class Config: + extra = "allow" + + +class ParameterBase(BaseModel): + description: Optional[str] = None + required: Optional[bool] = None + deprecated: Optional[bool] = None + # Serialization rules for simple scenarios + style: Optional[str] = None + explode: Optional[bool] = None + allowReserved: Optional[bool] = None + schema_: Optional[Union[Schema, Reference]] = Field(None, alias="schema") + example: Optional[Any] = None + examples: Optional[Dict[str, Union[Example, Reference]]] = None + # Serialization rules for more complex scenarios + content: Optional[Dict[str, MediaType]] = None + + class Config: + extra = "allow" + + +class Parameter(ParameterBase): + name: str + in_: ParameterInType = Field(..., alias="in") + + +class Header(ParameterBase): + pass + + +class RequestBody(BaseModel): + description: Optional[str] = None + content: Dict[str, MediaType] + required: Optional[bool] = None + + class Config: + extra = "allow" + + +class Link(BaseModel): + operationRef: Optional[str] = None + operationId: Optional[str] = None + parameters: Optional[Dict[str, Union[Any, str]]] = None + requestBody: Optional[Union[Any, str]] = None + description: Optional[str] = None + server: Optional[Server] = None + + class Config: + extra = "allow" + + +class Response(BaseModel): + description: str + headers: Optional[Dict[str, Union[Header, Reference]]] = None + content: Optional[Dict[str, MediaType]] = None + links: Optional[Dict[str, Union[Link, Reference]]] = None + + class Config: + extra = "allow" + + +class Operation(BaseModel): + tags: Optional[List[str]] = None + summary: Optional[str] = None + description: Optional[str] = None + externalDocs: 
Optional[ExternalDocumentation] = None + operationId: Optional[str] = None + parameters: Optional[List[Union[Parameter, Reference]]] = None + requestBody: Optional[Union[RequestBody, Reference]] = None + # Using Any for Specification Extensions + responses: Dict[str, Union[Response, Any]] + callbacks: Optional[Dict[str, Union[Dict[str, "PathItem"], Reference]]] = None + deprecated: Optional[bool] = None + security: Optional[List[Dict[str, List[str]]]] = None + servers: Optional[List[Server]] = None + + class Config: + extra = "allow" + + +class PathItem(BaseModel): + ref: Optional[str] = Field(None, alias="$ref") + summary: Optional[str] = None + description: Optional[str] = None + get: Optional[Operation] = None + put: Optional[Operation] = None + post: Optional[Operation] = None + delete: Optional[Operation] = None + options: Optional[Operation] = None + head: Optional[Operation] = None + patch: Optional[Operation] = None + trace: Optional[Operation] = None + servers: Optional[List[Server]] = None + parameters: Optional[List[Union[Parameter, Reference]]] = None + + class Config: + extra = "allow" + + +class SecuritySchemeType(Enum): + apiKey = "apiKey" + http = "http" + oauth2 = "oauth2" + openIdConnect = "openIdConnect" + + +class SecurityBase(BaseModel): + type_: SecuritySchemeType = Field(..., alias="type") + description: Optional[str] = None + + class Config: + extra = "allow" + + +class APIKeyIn(Enum): + query = "query" + header = "header" + cookie = "cookie" + + +class APIKey(SecurityBase): + type_ = Field(SecuritySchemeType.apiKey, alias="type") + in_: APIKeyIn = Field(..., alias="in") + name: str + + +class HTTPBase(SecurityBase): + type_ = Field(SecuritySchemeType.http, alias="type") + scheme: str + + +class HTTPBearer(HTTPBase): + scheme = "bearer" + bearerFormat: Optional[str] = None + + +class OAuthFlow(BaseModel): + refreshUrl: Optional[str] = None + scopes: Dict[str, str] = {} + + class Config: + extra = "allow" + + +class OAuthFlowImplicit(OAuthFlow): + authorizationUrl: str + + +class OAuthFlowPassword(OAuthFlow): + tokenUrl: str + + +class OAuthFlowClientCredentials(OAuthFlow): + tokenUrl: str + + +class OAuthFlowAuthorizationCode(OAuthFlow): + authorizationUrl: str + tokenUrl: str + + +class OAuthFlows(BaseModel): + implicit: Optional[OAuthFlowImplicit] = None + password: Optional[OAuthFlowPassword] = None + clientCredentials: Optional[OAuthFlowClientCredentials] = None + authorizationCode: Optional[OAuthFlowAuthorizationCode] = None + + class Config: + extra = "allow" + + +class OAuth2(SecurityBase): + type_ = Field(SecuritySchemeType.oauth2, alias="type") + flows: OAuthFlows + + +class OpenIdConnect(SecurityBase): + type_ = Field(SecuritySchemeType.openIdConnect, alias="type") + openIdConnectUrl: str + + +SecurityScheme = Union[APIKey, HTTPBase, OAuth2, OpenIdConnect, HTTPBearer] + + +class Components(BaseModel): + schemas: Optional[Dict[str, Union[Schema, Reference]]] = None + responses: Optional[Dict[str, Union[Response, Reference]]] = None + parameters: Optional[Dict[str, Union[Parameter, Reference]]] = None + examples: Optional[Dict[str, Union[Example, Reference]]] = None + requestBodies: Optional[Dict[str, Union[RequestBody, Reference]]] = None + headers: Optional[Dict[str, Union[Header, Reference]]] = None + securitySchemes: Optional[Dict[str, Union[SecurityScheme, Reference]]] = None + links: Optional[Dict[str, Union[Link, Reference]]] = None + # Using Any for Specification Extensions + callbacks: Optional[Dict[str, Union[Dict[str, PathItem], Reference, Any]]] 
= None + + class Config: + extra = "allow" + + +class Tag(BaseModel): + name: str + description: Optional[str] = None + externalDocs: Optional[ExternalDocumentation] = None + + class Config: + extra = "allow" + + +class OpenAPI(BaseModel): + openapi: str + info: Info + servers: Optional[List[Server]] = None + # Using Any for Specification Extensions + paths: Dict[str, Union[PathItem, Any]] + components: Optional[Components] = None + security: Optional[List[Dict[str, List[str]]]] = None + tags: Optional[List[Tag]] = None + externalDocs: Optional[ExternalDocumentation] = None + + class Config: + extra = "allow" + + +Schema.update_forward_refs() +Operation.update_forward_refs() +Encoding.update_forward_refs() diff --git a/venv/lib/python3.10/site-packages/fastapi/openapi/utils.py b/venv/lib/python3.10/site-packages/fastapi/openapi/utils.py new file mode 100644 index 0000000..4eb727b --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/openapi/utils.py @@ -0,0 +1,443 @@ +import http.client +import inspect +import warnings +from enum import Enum +from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Type, Union, cast + +from fastapi import routing +from fastapi.datastructures import DefaultPlaceholder +from fastapi.dependencies.models import Dependant +from fastapi.dependencies.utils import get_flat_dependant, get_flat_params +from fastapi.encoders import jsonable_encoder +from fastapi.openapi.constants import ( + METHODS_WITH_BODY, + REF_PREFIX, + STATUS_CODES_WITH_NO_BODY, +) +from fastapi.openapi.models import OpenAPI +from fastapi.params import Body, Param +from fastapi.responses import Response +from fastapi.utils import ( + deep_dict_update, + generate_operation_id_for_path, + get_model_definitions, +) +from pydantic import BaseModel +from pydantic.fields import ModelField, Undefined +from pydantic.schema import ( + field_schema, + get_flat_models_from_fields, + get_model_name_map, +) +from pydantic.utils import lenient_issubclass +from starlette.responses import JSONResponse +from starlette.routing import BaseRoute +from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY + +validation_error_definition = { + "title": "ValidationError", + "type": "object", + "properties": { + "loc": { + "title": "Location", + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "integer"}]}, + }, + "msg": {"title": "Message", "type": "string"}, + "type": {"title": "Error Type", "type": "string"}, + }, + "required": ["loc", "msg", "type"], +} + +validation_error_response_definition = { + "title": "HTTPValidationError", + "type": "object", + "properties": { + "detail": { + "title": "Detail", + "type": "array", + "items": {"$ref": REF_PREFIX + "ValidationError"}, + } + }, +} + +status_code_ranges: Dict[str, str] = { + "1XX": "Information", + "2XX": "Success", + "3XX": "Redirection", + "4XX": "Client Error", + "5XX": "Server Error", + "DEFAULT": "Default Response", +} + + +def get_openapi_security_definitions( + flat_dependant: Dependant, +) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: + security_definitions = {} + operation_security = [] + for security_requirement in flat_dependant.security_requirements: + security_definition = jsonable_encoder( + security_requirement.security_scheme.model, + by_alias=True, + exclude_none=True, + ) + security_name = security_requirement.security_scheme.scheme_name + security_definitions[security_name] = security_definition + operation_security.append({security_name: security_requirement.scopes}) + return security_definitions, 
operation_security + + +def get_openapi_operation_parameters( + *, + all_route_params: Sequence[ModelField], + model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str], +) -> List[Dict[str, Any]]: + parameters = [] + for param in all_route_params: + field_info = param.field_info + field_info = cast(Param, field_info) + if not field_info.include_in_schema: + continue + parameter = { + "name": param.alias, + "in": field_info.in_.value, + "required": param.required, + "schema": field_schema( + param, model_name_map=model_name_map, ref_prefix=REF_PREFIX + )[0], + } + if field_info.description: + parameter["description"] = field_info.description + if field_info.examples: + parameter["examples"] = jsonable_encoder(field_info.examples) + elif field_info.example != Undefined: + parameter["example"] = jsonable_encoder(field_info.example) + if field_info.deprecated: + parameter["deprecated"] = field_info.deprecated + parameters.append(parameter) + return parameters + + +def get_openapi_operation_request_body( + *, + body_field: Optional[ModelField], + model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str], +) -> Optional[Dict[str, Any]]: + if not body_field: + return None + assert isinstance(body_field, ModelField) + body_schema, _, _ = field_schema( + body_field, model_name_map=model_name_map, ref_prefix=REF_PREFIX + ) + field_info = cast(Body, body_field.field_info) + request_media_type = field_info.media_type + required = body_field.required + request_body_oai: Dict[str, Any] = {} + if required: + request_body_oai["required"] = required + request_media_content: Dict[str, Any] = {"schema": body_schema} + if field_info.examples: + request_media_content["examples"] = jsonable_encoder(field_info.examples) + elif field_info.example != Undefined: + request_media_content["example"] = jsonable_encoder(field_info.example) + request_body_oai["content"] = {request_media_type: request_media_content} + return request_body_oai + + +def generate_operation_id( + *, route: routing.APIRoute, method: str +) -> str: # pragma: nocover + warnings.warn( + "fastapi.openapi.utils.generate_operation_id() was deprecated, " + "it is not used internally, and will be removed soon", + DeprecationWarning, + stacklevel=2, + ) + if route.operation_id: + return route.operation_id + path: str = route.path_format + return generate_operation_id_for_path(name=route.name, path=path, method=method) + + +def generate_operation_summary(*, route: routing.APIRoute, method: str) -> str: + if route.summary: + return route.summary + return route.name.replace("_", " ").title() + + +def get_openapi_operation_metadata( + *, route: routing.APIRoute, method: str, operation_ids: Set[str] +) -> Dict[str, Any]: + operation: Dict[str, Any] = {} + if route.tags: + operation["tags"] = route.tags + operation["summary"] = generate_operation_summary(route=route, method=method) + if route.description: + operation["description"] = route.description + operation_id = route.operation_id or route.unique_id + if operation_id in operation_ids: + message = ( + f"Duplicate Operation ID {operation_id} for function " + + f"{route.endpoint.__name__}" + ) + file_name = getattr(route.endpoint, "__globals__", {}).get("__file__") + if file_name: + message += f" at {file_name}" + warnings.warn(message) + operation_ids.add(operation_id) + operation["operationId"] = operation_id + if route.deprecated: + operation["deprecated"] = route.deprecated + return operation + + +def get_openapi_path( + *, route: routing.APIRoute, model_name_map: Dict[type, str], 
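#   For orientation, what get_openapi_operation_parameters() above emits for
#   a hypothetical `q: str = Query(None, description="search")` is roughly:
#
#       {"name": "q", "in": "query", "required": False,
#        "description": "search", "schema": {"title": "Q", "type": "string"}}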
operation_ids: Set[str] +) -> Tuple[Dict[str, Any], Dict[str, Any], Dict[str, Any]]: + path = {} + security_schemes: Dict[str, Any] = {} + definitions: Dict[str, Any] = {} + assert route.methods is not None, "Methods must be a list" + if isinstance(route.response_class, DefaultPlaceholder): + current_response_class: Type[Response] = route.response_class.value + else: + current_response_class = route.response_class + assert current_response_class, "A response class is needed to generate OpenAPI" + route_response_media_type: Optional[str] = current_response_class.media_type + if route.include_in_schema: + for method in route.methods: + operation = get_openapi_operation_metadata( + route=route, method=method, operation_ids=operation_ids + ) + parameters: List[Dict[str, Any]] = [] + flat_dependant = get_flat_dependant(route.dependant, skip_repeats=True) + security_definitions, operation_security = get_openapi_security_definitions( + flat_dependant=flat_dependant + ) + if operation_security: + operation.setdefault("security", []).extend(operation_security) + if security_definitions: + security_schemes.update(security_definitions) + all_route_params = get_flat_params(route.dependant) + operation_parameters = get_openapi_operation_parameters( + all_route_params=all_route_params, model_name_map=model_name_map + ) + parameters.extend(operation_parameters) + if parameters: + operation["parameters"] = list( + {param["name"]: param for param in parameters}.values() + ) + if method in METHODS_WITH_BODY: + request_body_oai = get_openapi_operation_request_body( + body_field=route.body_field, model_name_map=model_name_map + ) + if request_body_oai: + operation["requestBody"] = request_body_oai + if route.callbacks: + callbacks = {} + for callback in route.callbacks: + if isinstance(callback, routing.APIRoute): + ( + cb_path, + cb_security_schemes, + cb_definitions, + ) = get_openapi_path( + route=callback, + model_name_map=model_name_map, + operation_ids=operation_ids, + ) + callbacks[callback.name] = {callback.path: cb_path} + operation["callbacks"] = callbacks + if route.status_code is not None: + status_code = str(route.status_code) + else: + # It would probably make more sense for all response classes to have an + # explicit default status_code, and to extract it from them, instead of + # doing this inspection tricks, that would probably be in the future + # TODO: probably make status_code a default class attribute for all + # responses in Starlette + response_signature = inspect.signature(current_response_class.__init__) + status_code_param = response_signature.parameters.get("status_code") + if status_code_param is not None: + if isinstance(status_code_param.default, int): + status_code = str(status_code_param.default) + operation.setdefault("responses", {}).setdefault(status_code, {})[ + "description" + ] = route.response_description + if ( + route_response_media_type + and route.status_code not in STATUS_CODES_WITH_NO_BODY + ): + response_schema = {"type": "string"} + if lenient_issubclass(current_response_class, JSONResponse): + if route.response_field: + response_schema, _, _ = field_schema( + route.response_field, + model_name_map=model_name_map, + ref_prefix=REF_PREFIX, + ) + else: + response_schema = {} + operation.setdefault("responses", {}).setdefault( + status_code, {} + ).setdefault("content", {}).setdefault(route_response_media_type, {})[ + "schema" + ] = response_schema + if route.responses: + operation_responses = operation.setdefault("responses", {}) + for ( + additional_status_code, 
+ additional_response, + ) in route.responses.items(): + process_response = additional_response.copy() + process_response.pop("model", None) + status_code_key = str(additional_status_code).upper() + if status_code_key == "DEFAULT": + status_code_key = "default" + openapi_response = operation_responses.setdefault( + status_code_key, {} + ) + assert isinstance( + process_response, dict + ), "An additional response must be a dict" + field = route.response_fields.get(additional_status_code) + additional_field_schema: Optional[Dict[str, Any]] = None + if field: + additional_field_schema, _, _ = field_schema( + field, model_name_map=model_name_map, ref_prefix=REF_PREFIX + ) + media_type = route_response_media_type or "application/json" + additional_schema = ( + process_response.setdefault("content", {}) + .setdefault(media_type, {}) + .setdefault("schema", {}) + ) + deep_dict_update(additional_schema, additional_field_schema) + status_text: Optional[str] = status_code_ranges.get( + str(additional_status_code).upper() + ) or http.client.responses.get(int(additional_status_code)) + description = ( + process_response.get("description") + or openapi_response.get("description") + or status_text + or "Additional Response" + ) + deep_dict_update(openapi_response, process_response) + openapi_response["description"] = description + http422 = str(HTTP_422_UNPROCESSABLE_ENTITY) + if (all_route_params or route.body_field) and not any( + [ + status in operation["responses"] + for status in [http422, "4XX", "default"] + ] + ): + operation["responses"][http422] = { + "description": "Validation Error", + "content": { + "application/json": { + "schema": {"$ref": REF_PREFIX + "HTTPValidationError"} + } + }, + } + if "ValidationError" not in definitions: + definitions.update( + { + "ValidationError": validation_error_definition, + "HTTPValidationError": validation_error_response_definition, + } + ) + if route.openapi_extra: + deep_dict_update(operation, route.openapi_extra) + path[method.lower()] = operation + return path, security_schemes, definitions + + +def get_flat_models_from_routes( + routes: Sequence[BaseRoute], +) -> Set[Union[Type[BaseModel], Type[Enum]]]: + body_fields_from_routes: List[ModelField] = [] + responses_from_routes: List[ModelField] = [] + request_fields_from_routes: List[ModelField] = [] + callback_flat_models: Set[Union[Type[BaseModel], Type[Enum]]] = set() + for route in routes: + if getattr(route, "include_in_schema", None) and isinstance( + route, routing.APIRoute + ): + if route.body_field: + assert isinstance( + route.body_field, ModelField + ), "A request body must be a Pydantic Field" + body_fields_from_routes.append(route.body_field) + if route.response_field: + responses_from_routes.append(route.response_field) + if route.response_fields: + responses_from_routes.extend(route.response_fields.values()) + if route.callbacks: + callback_flat_models |= get_flat_models_from_routes(route.callbacks) + params = get_flat_params(route.dependant) + request_fields_from_routes.extend(params) + + flat_models = callback_flat_models | get_flat_models_from_fields( + body_fields_from_routes + responses_from_routes + request_fields_from_routes, + known_models=set(), + ) + return flat_models + + +def get_openapi( + *, + title: str, + version: str, + openapi_version: str = "3.0.2", + description: Optional[str] = None, + routes: Sequence[BaseRoute], + tags: Optional[List[Dict[str, Any]]] = None, + servers: Optional[List[Dict[str, Union[str, Any]]]] = None, + terms_of_service: Optional[str] = None, + 
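#   A common usage sketch for get_openapi() (hypothetical `app` object),
#   caching the generated schema on first access, as the FastAPI docs
#   suggest for schema customization:
#
#       def custom_openapi():
#           if app.openapi_schema:
#               return app.openapi_schema
#           app.openapi_schema = get_openapi(
#               title="My API", version="1.0.0", routes=app.routes)
#           return app.openapi_schema
#
#       app.openapi = custom_openapi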
contact: Optional[Dict[str, Union[str, Any]]] = None, + license_info: Optional[Dict[str, Union[str, Any]]] = None, +) -> Dict[str, Any]: + info: Dict[str, Any] = {"title": title, "version": version} + if description: + info["description"] = description + if terms_of_service: + info["termsOfService"] = terms_of_service + if contact: + info["contact"] = contact + if license_info: + info["license"] = license_info + output: Dict[str, Any] = {"openapi": openapi_version, "info": info} + if servers: + output["servers"] = servers + components: Dict[str, Dict[str, Any]] = {} + paths: Dict[str, Dict[str, Any]] = {} + operation_ids: Set[str] = set() + flat_models = get_flat_models_from_routes(routes) + model_name_map = get_model_name_map(flat_models) + definitions = get_model_definitions( + flat_models=flat_models, model_name_map=model_name_map + ) + for route in routes: + if isinstance(route, routing.APIRoute): + result = get_openapi_path( + route=route, model_name_map=model_name_map, operation_ids=operation_ids + ) + if result: + path, security_schemes, path_definitions = result + if path: + paths.setdefault(route.path_format, {}).update(path) + if security_schemes: + components.setdefault("securitySchemes", {}).update( + security_schemes + ) + if path_definitions: + definitions.update(path_definitions) + if definitions: + components["schemas"] = {k: definitions[k] for k in sorted(definitions)} + if components: + output["components"] = components + output["paths"] = paths + if tags: + output["tags"] = tags + return jsonable_encoder(OpenAPI(**output), by_alias=True, exclude_none=True) # type: ignore diff --git a/venv/lib/python3.10/site-packages/fastapi/param_functions.py b/venv/lib/python3.10/site-packages/fastapi/param_functions.py new file mode 100644 index 0000000..a553a14 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/param_functions.py @@ -0,0 +1,290 @@ +from typing import Any, Callable, Dict, Optional, Sequence + +from fastapi import params +from pydantic.fields import Undefined + + +def Path( # noqa: N802 + default: Any, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, +) -> Any: + return params.Path( + default=default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + **extra, + ) + + +def Query( # noqa: N802 + default: Any, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, +) -> Any: + return params.Query( + default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + 
le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + **extra, + ) + + +def Header( # noqa: N802 + default: Any, + *, + alias: Optional[str] = None, + convert_underscores: bool = True, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, +) -> Any: + return params.Header( + default, + alias=alias, + convert_underscores=convert_underscores, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + **extra, + ) + + +def Cookie( # noqa: N802 + default: Any, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, +) -> Any: + return params.Cookie( + default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + **extra, + ) + + +def Body( # noqa: N802 + default: Any, + *, + embed: bool = False, + media_type: str = "application/json", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, +) -> Any: + return params.Body( + default, + embed=embed, + media_type=media_type, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + **extra, + ) + + +def Form( # noqa: N802 + default: Any, + *, + media_type: str = "application/x-www-form-urlencoded", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, +) -> Any: + return params.Form( + default, + media_type=media_type, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + 
example=example, + examples=examples, + **extra, + ) + + +def File( # noqa: N802 + default: Any, + *, + media_type: str = "multipart/form-data", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, +) -> Any: + return params.File( + default, + media_type=media_type, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + **extra, + ) + + +def Depends( # noqa: N802 + dependency: Optional[Callable[..., Any]] = None, *, use_cache: bool = True +) -> Any: + return params.Depends(dependency=dependency, use_cache=use_cache) + + +def Security( # noqa: N802 + dependency: Optional[Callable[..., Any]] = None, + *, + scopes: Optional[Sequence[str]] = None, + use_cache: bool = True, +) -> Any: + return params.Security(dependency=dependency, scopes=scopes, use_cache=use_cache) diff --git a/venv/lib/python3.10/site-packages/fastapi/params.py b/venv/lib/python3.10/site-packages/fastapi/params.py new file mode 100644 index 0000000..042bbd4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/params.py @@ -0,0 +1,380 @@ +from enum import Enum +from typing import Any, Callable, Dict, Optional, Sequence + +from pydantic.fields import FieldInfo, Undefined + + +class ParamTypes(Enum): + query = "query" + header = "header" + path = "path" + cookie = "cookie" + + +class Param(FieldInfo): + in_: ParamTypes + + def __init__( + self, + default: Any, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, + ): + self.deprecated = deprecated + self.example = example + self.examples = examples + self.include_in_schema = include_in_schema + super().__init__( + default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + **extra, + ) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.default})" + + +class Path(Param): + in_ = ParamTypes.path + + def __init__( + self, + default: Any, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, + ): + self.in_ = self.in_ + super().__init__( + ..., + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, 
+ deprecated=deprecated, + example=example, + examples=examples, + include_in_schema=include_in_schema, + **extra, + ) + + +class Query(Param): + in_ = ParamTypes.query + + def __init__( + self, + default: Any, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, + ): + super().__init__( + default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + deprecated=deprecated, + example=example, + examples=examples, + include_in_schema=include_in_schema, + **extra, + ) + + +class Header(Param): + in_ = ParamTypes.header + + def __init__( + self, + default: Any, + *, + alias: Optional[str] = None, + convert_underscores: bool = True, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, + ): + self.convert_underscores = convert_underscores + super().__init__( + default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + deprecated=deprecated, + example=example, + examples=examples, + include_in_schema=include_in_schema, + **extra, + ) + + +class Cookie(Param): + in_ = ParamTypes.cookie + + def __init__( + self, + default: Any, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, + ): + super().__init__( + default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + deprecated=deprecated, + example=example, + examples=examples, + include_in_schema=include_in_schema, + **extra, + ) + + +class Body(FieldInfo): + def __init__( + self, + default: Any, + *, + embed: bool = False, + media_type: str = "application/json", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, + ): + self.embed = embed + self.media_type = media_type + self.example = example + self.examples = examples + 
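#   Tying these Param subclasses back to endpoint code (hypothetical route):
#   the helpers in param_functions.py just instantiate the matching class,
#   and request_params_to_args() later reads .in_, .alias and the
#   constraints off the resulting FieldInfo:
#
#       @app.get("/items/{item_id}")
#       async def read_item(
#           item_id: int = Path(..., ge=1),        # ParamTypes.path
#           q: str = Query(None, max_length=50),   # ParamTypes.query
#       ): ...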
super().__init__( + default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + **extra, + ) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.default})" + + +class Form(Body): + def __init__( + self, + default: Any, + *, + media_type: str = "application/x-www-form-urlencoded", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, + ): + super().__init__( + default, + embed=True, + media_type=media_type, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + **extra, + ) + + +class File(Form): + def __init__( + self, + default: Any, + *, + media_type: str = "multipart/form-data", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, + ): + super().__init__( + default, + media_type=media_type, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + **extra, + ) + + +class Depends: + def __init__( + self, dependency: Optional[Callable[..., Any]] = None, *, use_cache: bool = True + ): + self.dependency = dependency + self.use_cache = use_cache + + def __repr__(self) -> str: + attr = getattr(self.dependency, "__name__", type(self.dependency).__name__) + cache = "" if self.use_cache else ", use_cache=False" + return f"{self.__class__.__name__}({attr}{cache})" + + +class Security(Depends): + def __init__( + self, + dependency: Optional[Callable[..., Any]] = None, + *, + scopes: Optional[Sequence[str]] = None, + use_cache: bool = True, + ): + super().__init__(dependency=dependency, use_cache=use_cache) + self.scopes = scopes or [] diff --git a/venv/lib/python3.10/site-packages/fastapi/py.typed b/venv/lib/python3.10/site-packages/fastapi/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/fastapi/requests.py b/venv/lib/python3.10/site-packages/fastapi/requests.py new file mode 100644 index 0000000..d16552c --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/requests.py @@ -0,0 +1,2 @@ +from starlette.requests import HTTPConnection as HTTPConnection # noqa: F401 +from starlette.requests import Request as Request # noqa: F401 diff --git a/venv/lib/python3.10/site-packages/fastapi/responses.py b/venv/lib/python3.10/site-packages/fastapi/responses.py new file mode 100644 index 0000000..6cd7931 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/responses.py @@ -0,0 +1,34 @@ +from typing import Any + +from starlette.responses import FileResponse as FileResponse # noqa +from starlette.responses import HTMLResponse as 
HTMLResponse # noqa +from starlette.responses import JSONResponse as JSONResponse # noqa +from starlette.responses import PlainTextResponse as PlainTextResponse # noqa +from starlette.responses import RedirectResponse as RedirectResponse # noqa +from starlette.responses import Response as Response # noqa +from starlette.responses import StreamingResponse as StreamingResponse # noqa + +try: + import ujson +except ImportError: # pragma: nocover + ujson = None # type: ignore + + +try: + import orjson +except ImportError: # pragma: nocover + orjson = None # type: ignore + + +class UJSONResponse(JSONResponse): + def render(self, content: Any) -> bytes: + assert ujson is not None, "ujson must be installed to use UJSONResponse" + return ujson.dumps(content, ensure_ascii=False).encode("utf-8") + + +class ORJSONResponse(JSONResponse): + media_type = "application/json" + + def render(self, content: Any) -> bytes: + assert orjson is not None, "orjson must be installed to use ORJSONResponse" + return orjson.dumps(content) diff --git a/venv/lib/python3.10/site-packages/fastapi/routing.py b/venv/lib/python3.10/site-packages/fastapi/routing.py new file mode 100644 index 0000000..7a15f39 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/routing.py @@ -0,0 +1,1228 @@ +import asyncio +import dataclasses +import email.message +import inspect +import json +from enum import Enum, IntEnum +from typing import ( + Any, + Callable, + Coroutine, + Dict, + List, + Optional, + Sequence, + Set, + Tuple, + Type, + Union, +) + +from fastapi import params +from fastapi.datastructures import Default, DefaultPlaceholder +from fastapi.dependencies.models import Dependant +from fastapi.dependencies.utils import ( + get_body_field, + get_dependant, + get_parameterless_sub_dependant, + solve_dependencies, +) +from fastapi.encoders import DictIntStrAny, SetIntStr, jsonable_encoder +from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError +from fastapi.openapi.constants import STATUS_CODES_WITH_NO_BODY +from fastapi.types import DecoratedCallable +from fastapi.utils import ( + create_cloned_field, + create_response_field, + generate_unique_id, + get_value_or_default, +) +from pydantic import BaseModel +from pydantic.error_wrappers import ErrorWrapper, ValidationError +from pydantic.fields import ModelField, Undefined +from starlette import routing +from starlette.concurrency import run_in_threadpool +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.responses import JSONResponse, Response +from starlette.routing import BaseRoute, Match +from starlette.routing import Mount as Mount # noqa +from starlette.routing import ( + compile_path, + get_name, + request_response, + websocket_session, +) +from starlette.status import WS_1008_POLICY_VIOLATION +from starlette.types import ASGIApp, Scope +from starlette.websockets import WebSocket + + +def _prepare_response_content( + res: Any, + *, + exclude_unset: bool, + exclude_defaults: bool = False, + exclude_none: bool = False, +) -> Any: + if isinstance(res, BaseModel): + read_with_orm_mode = getattr(res.__config__, "read_with_orm_mode", None) + if read_with_orm_mode: + # Let from_orm extract the data from this model instead of converting + # it now to a dict. + # Otherwise there's no way to extract lazy data that requires attribute + # access instead of dict iteration, e.g. lazy relationships. 
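# Illustrative sketch (assumed model, not part of this function): with
#
#     class UserOut(BaseModel):
#         name: str
#
#         class Config:
#             orm_mode = True
#             read_with_orm_mode = True
#
# an ORM object returned from a path operation is passed through here
# unchanged, so response validation can later use from_orm() and trigger the
# lazy attribute access (e.g. a SQLAlchemy relationship) only at that point.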
+ return res + return res.dict( + by_alias=True, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + elif isinstance(res, list): + return [ + _prepare_response_content( + item, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + for item in res + ] + elif isinstance(res, dict): + return { + k: _prepare_response_content( + v, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + for k, v in res.items() + } + elif dataclasses.is_dataclass(res): + return dataclasses.asdict(res) + return res + + +async def serialize_response( + *, + field: Optional[ModelField] = None, + response_content: Any, + include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + by_alias: bool = True, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + is_coroutine: bool = True, +) -> Any: + if field: + errors = [] + response_content = _prepare_response_content( + response_content, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + if is_coroutine: + value, errors_ = field.validate(response_content, {}, loc=("response",)) + else: + value, errors_ = await run_in_threadpool( + field.validate, response_content, {}, loc=("response",) + ) + if isinstance(errors_, ErrorWrapper): + errors.append(errors_) + elif isinstance(errors_, list): + errors.extend(errors_) + if errors: + raise ValidationError(errors, field.type_) + return jsonable_encoder( + value, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + else: + return jsonable_encoder(response_content) + + +async def run_endpoint_function( + *, dependant: Dependant, values: Dict[str, Any], is_coroutine: bool +) -> Any: + # Only called by get_request_handler. Has been split into its own function to + # facilitate profiling endpoints, since inner functions are harder to profile. 
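# Dispatch rule illustrated (sketch; endpoints assumed):
#
#     @app.get("/a")
#     async def a(): ...   # is_coroutine=True  -> awaited on the event loop
#
#     @app.get("/b")
#     def b(): ...         # is_coroutine=False -> run_in_threadpool, so a
#                          # blocking body cannot stall the event loop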
+ assert dependant.call is not None, "dependant.call must be a function" + + if is_coroutine: + return await dependant.call(**values) + else: + return await run_in_threadpool(dependant.call, **values) + + +def get_request_handler( + dependant: Dependant, + body_field: Optional[ModelField] = None, + status_code: Optional[int] = None, + response_class: Union[Type[Response], DefaultPlaceholder] = Default(JSONResponse), + response_field: Optional[ModelField] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + dependency_overrides_provider: Optional[Any] = None, +) -> Callable[[Request], Coroutine[Any, Any, Response]]: + assert dependant.call is not None, "dependant.call must be a function" + is_coroutine = asyncio.iscoroutinefunction(dependant.call) + is_body_form = body_field and isinstance(body_field.field_info, params.Form) + if isinstance(response_class, DefaultPlaceholder): + actual_response_class: Type[Response] = response_class.value + else: + actual_response_class = response_class + + async def app(request: Request) -> Response: + try: + body: Any = None + if body_field: + if is_body_form: + body = await request.form() + else: + body_bytes = await request.body() + if body_bytes: + json_body: Any = Undefined + content_type_value = request.headers.get("content-type") + if not content_type_value: + json_body = await request.json() + else: + message = email.message.Message() + message["content-type"] = content_type_value + if message.get_content_maintype() == "application": + subtype = message.get_content_subtype() + if subtype == "json" or subtype.endswith("+json"): + json_body = await request.json() + if json_body != Undefined: + body = json_body + else: + body = body_bytes + except json.JSONDecodeError as e: + raise RequestValidationError([ErrorWrapper(e, ("body", e.pos))], body=e.doc) + except Exception as e: + raise HTTPException( + status_code=400, detail="There was an error parsing the body" + ) from e + solved_result = await solve_dependencies( + request=request, + dependant=dependant, + body=body, + dependency_overrides_provider=dependency_overrides_provider, + ) + values, errors, background_tasks, sub_response, _ = solved_result + if errors: + raise RequestValidationError(errors, body=body) + else: + raw_response = await run_endpoint_function( + dependant=dependant, values=values, is_coroutine=is_coroutine + ) + + if isinstance(raw_response, Response): + if raw_response.background is None: + raw_response.background = background_tasks + return raw_response + response_data = await serialize_response( + field=response_field, + response_content=raw_response, + include=response_model_include, + exclude=response_model_exclude, + by_alias=response_model_by_alias, + exclude_unset=response_model_exclude_unset, + exclude_defaults=response_model_exclude_defaults, + exclude_none=response_model_exclude_none, + is_coroutine=is_coroutine, + ) + response_args: Dict[str, Any] = {"background": background_tasks} + # If status_code was set, use it, otherwise use the default from the + # response class, in the case of redirect it's 307 + if status_code is not None: + response_args["status_code"] = status_code + response = actual_response_class(response_data, **response_args) + 
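# Merge-back (descriptive note): sub_response is the scratch Response that
# dependencies and the endpoint may mutate through a "response: Response"
# parameter; copying its raw headers and status code onto the freshly built
# response is what lets e.g. a Set-Cookie header set inside a dependency
# survive into the serialized result.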
response.headers.raw.extend(sub_response.headers.raw) + if sub_response.status_code: + response.status_code = sub_response.status_code + return response + + return app + + +def get_websocket_app( + dependant: Dependant, dependency_overrides_provider: Optional[Any] = None +) -> Callable[[WebSocket], Coroutine[Any, Any, Any]]: + async def app(websocket: WebSocket) -> None: + solved_result = await solve_dependencies( + request=websocket, + dependant=dependant, + dependency_overrides_provider=dependency_overrides_provider, + ) + values, errors, _, _2, _3 = solved_result + if errors: + await websocket.close(code=WS_1008_POLICY_VIOLATION) + raise WebSocketRequestValidationError(errors) + assert dependant.call is not None, "dependant.call must be a function" + await dependant.call(**values) + + return app + + +class APIWebSocketRoute(routing.WebSocketRoute): + def __init__( + self, + path: str, + endpoint: Callable[..., Any], + *, + name: Optional[str] = None, + dependency_overrides_provider: Optional[Any] = None, + ) -> None: + self.path = path + self.endpoint = endpoint + self.name = get_name(endpoint) if name is None else name + self.dependant = get_dependant(path=path, call=self.endpoint) + self.app = websocket_session( + get_websocket_app( + dependant=self.dependant, + dependency_overrides_provider=dependency_overrides_provider, + ) + ) + self.path_regex, self.path_format, self.param_convertors = compile_path(path) + + def matches(self, scope: Scope) -> Tuple[Match, Scope]: + match, child_scope = super().matches(scope) + if match != Match.NONE: + child_scope["route"] = self + return match, child_scope + + +class APIRoute(routing.Route): + def __init__( + self, + path: str, + endpoint: Callable[..., Any], + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + name: Optional[str] = None, + methods: Optional[Union[Set[str], List[str]]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Union[Type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + dependency_overrides_provider: Optional[Any] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Union[ + Callable[["APIRoute"], str], DefaultPlaceholder + ] = Default(generate_unique_id), + ) -> None: + self.path = path + self.endpoint = endpoint + self.response_model = response_model + self.summary = summary + self.response_description = response_description + self.deprecated = deprecated + self.operation_id = operation_id + self.response_model_include = response_model_include + self.response_model_exclude = response_model_exclude + self.response_model_by_alias = response_model_by_alias + self.response_model_exclude_unset = response_model_exclude_unset + self.response_model_exclude_defaults = 
response_model_exclude_defaults + self.response_model_exclude_none = response_model_exclude_none + self.include_in_schema = include_in_schema + self.response_class = response_class + self.dependency_overrides_provider = dependency_overrides_provider + self.callbacks = callbacks + self.openapi_extra = openapi_extra + self.generate_unique_id_function = generate_unique_id_function + self.tags = tags or [] + self.responses = responses or {} + self.name = get_name(endpoint) if name is None else name + self.path_regex, self.path_format, self.param_convertors = compile_path(path) + if methods is None: + methods = ["GET"] + self.methods: Set[str] = set([method.upper() for method in methods]) + if isinstance(generate_unique_id_function, DefaultPlaceholder): + current_generate_unique_id: Callable[ + ["APIRoute"], str + ] = generate_unique_id_function.value + else: + current_generate_unique_id = generate_unique_id_function + self.unique_id = self.operation_id or current_generate_unique_id(self) + # normalize enums e.g. http.HTTPStatus + if isinstance(status_code, IntEnum): + status_code = int(status_code) + self.status_code = status_code + if self.response_model: + assert ( + status_code not in STATUS_CODES_WITH_NO_BODY + ), f"Status code {status_code} must not have a response body" + response_name = "Response_" + self.unique_id + self.response_field = create_response_field( + name=response_name, type_=self.response_model + ) + # Create a clone of the field, so that a Pydantic submodel is not returned + # as is just because it's an instance of a subclass of a more limited class + # e.g. UserInDB (containing hashed_password) could be a subclass of User + # that doesn't have the hashed_password. But because it's a subclass, it + # would pass the validation and be returned as is. + # By being a new field, no inheritance will be passed as is. A new model + # will be always created. 
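# Illustrative example of the filtering this guarantees (assumed models):
#
#     class User(BaseModel):
#         username: str
#
#     class UserInDB(User):
#         hashed_password: str
#
# With response_model=User, an endpoint returning a UserInDB still serializes
# only {"username": ...}; because the cloned field is a fresh model rather
# than the declared class itself, the subclass instance cannot pass through
# validation "as is" with its extra attributes.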
+ self.secure_cloned_response_field: Optional[ + ModelField + ] = create_cloned_field(self.response_field) + else: + self.response_field = None # type: ignore + self.secure_cloned_response_field = None + if dependencies: + self.dependencies = list(dependencies) + else: + self.dependencies = [] + self.description = description or inspect.cleandoc(self.endpoint.__doc__ or "") + # if a "form feed" character (page break) is found in the description text, + # truncate description text to the content preceding the first "form feed" + self.description = self.description.split("\f")[0] + response_fields = {} + for additional_status_code, response in self.responses.items(): + assert isinstance(response, dict), "An additional response must be a dict" + model = response.get("model") + if model: + assert ( + additional_status_code not in STATUS_CODES_WITH_NO_BODY + ), f"Status code {additional_status_code} must not have a response body" + response_name = f"Response_{additional_status_code}_{self.unique_id}" + response_field = create_response_field(name=response_name, type_=model) + response_fields[additional_status_code] = response_field + if response_fields: + self.response_fields: Dict[Union[int, str], ModelField] = response_fields + else: + self.response_fields = {} + + assert callable(endpoint), "An endpoint must be a callable" + self.dependant = get_dependant(path=self.path_format, call=self.endpoint) + for depends in self.dependencies[::-1]: + self.dependant.dependencies.insert( + 0, + get_parameterless_sub_dependant(depends=depends, path=self.path_format), + ) + self.body_field = get_body_field(dependant=self.dependant, name=self.unique_id) + self.app = request_response(self.get_route_handler()) + + def get_route_handler(self) -> Callable[[Request], Coroutine[Any, Any, Response]]: + return get_request_handler( + dependant=self.dependant, + body_field=self.body_field, + status_code=self.status_code, + response_class=self.response_class, + response_field=self.secure_cloned_response_field, + response_model_include=self.response_model_include, + response_model_exclude=self.response_model_exclude, + response_model_by_alias=self.response_model_by_alias, + response_model_exclude_unset=self.response_model_exclude_unset, + response_model_exclude_defaults=self.response_model_exclude_defaults, + response_model_exclude_none=self.response_model_exclude_none, + dependency_overrides_provider=self.dependency_overrides_provider, + ) + + def matches(self, scope: Scope) -> Tuple[Match, Scope]: + match, child_scope = super().matches(scope) + if match != Match.NONE: + child_scope["route"] = self + return match, child_scope + + +class APIRouter(routing.Router): + def __init__( + self, + *, + prefix: str = "", + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + default_response_class: Type[Response] = Default(JSONResponse), + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + callbacks: Optional[List[BaseRoute]] = None, + routes: Optional[List[routing.BaseRoute]] = None, + redirect_slashes: bool = True, + default: Optional[ASGIApp] = None, + dependency_overrides_provider: Optional[Any] = None, + route_class: Type[APIRoute] = APIRoute, + on_startup: Optional[Sequence[Callable[[], Any]]] = None, + on_shutdown: Optional[Sequence[Callable[[], Any]]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> None: + 
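# Typical construction (illustrative sketch; verify_token is an assumed
# dependency): everything declared at router level here is later merged with
# per-route values in add_api_route()/include_router():
#
#     router = APIRouter(
#         prefix="/items",
#         tags=["items"],
#         dependencies=[Depends(verify_token)],
#         responses={404: {"description": "Not found"}},
#     )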
super().__init__( + routes=routes, # type: ignore # in Starlette + redirect_slashes=redirect_slashes, + default=default, # type: ignore # in Starlette + on_startup=on_startup, # type: ignore # in Starlette + on_shutdown=on_shutdown, # type: ignore # in Starlette + ) + if prefix: + assert prefix.startswith("/"), "A path prefix must start with '/'" + assert not prefix.endswith( + "/" + ), "A path prefix must not end with '/', as the routes will start with '/'" + self.prefix = prefix + self.tags: List[Union[str, Enum]] = tags or [] + self.dependencies = list(dependencies or []) or [] + self.deprecated = deprecated + self.include_in_schema = include_in_schema + self.responses = responses or {} + self.callbacks = callbacks or [] + self.dependency_overrides_provider = dependency_overrides_provider + self.route_class = route_class + self.default_response_class = default_response_class + self.generate_unique_id_function = generate_unique_id_function + + def add_api_route( + self, + path: str, + endpoint: Callable[..., Any], + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[Union[Set[str], List[str]]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Union[Type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + name: Optional[str] = None, + route_class_override: Optional[Type[APIRoute]] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Union[ + Callable[[APIRoute], str], DefaultPlaceholder + ] = Default(generate_unique_id), + ) -> None: + route_class = route_class_override or self.route_class + responses = responses or {} + combined_responses = {**self.responses, **responses} + current_response_class = get_value_or_default( + response_class, self.default_response_class + ) + current_tags = self.tags.copy() + if tags: + current_tags.extend(tags) + current_dependencies = self.dependencies.copy() + if dependencies: + current_dependencies.extend(dependencies) + current_callbacks = self.callbacks.copy() + if callbacks: + current_callbacks.extend(callbacks) + current_generate_unique_id = get_value_or_default( + generate_unique_id_function, self.generate_unique_id_function + ) + route = route_class( + self.prefix + path, + endpoint=endpoint, + response_model=response_model, + status_code=status_code, + tags=current_tags, + dependencies=current_dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=combined_responses, + deprecated=deprecated or self.deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + 
response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema and self.include_in_schema, + response_class=current_response_class, + name=name, + dependency_overrides_provider=self.dependency_overrides_provider, + callbacks=current_callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=current_generate_unique_id, + ) + self.routes.append(route) + + def api_route( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[List[str]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_api_route( + path, + func, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + return func + + return decorator + + def add_api_websocket_route( + self, path: str, endpoint: Callable[..., Any], name: Optional[str] = None + ) -> None: + route = APIWebSocketRoute( + self.prefix + path, + endpoint=endpoint, + name=name, + dependency_overrides_provider=self.dependency_overrides_provider, + ) + self.routes.append(route) + + def websocket( + self, path: str, name: Optional[str] = None + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_api_websocket_route(path, func, name=name) + return func + + return decorator + + def include_router( + self, + router: "APIRouter", + *, + prefix: str = "", + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + default_response_class: Type[Response] = 
Default(JSONResponse), + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + callbacks: Optional[List[BaseRoute]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> None: + if prefix: + assert prefix.startswith("/"), "A path prefix must start with '/'" + assert not prefix.endswith( + "/" + ), "A path prefix must not end with '/', as the routes will start with '/'" + else: + for r in router.routes: + path = getattr(r, "path") + name = getattr(r, "name", "unknown") + if path is not None and not path: + raise Exception( + f"Prefix and path cannot be both empty (path operation: {name})" + ) + if responses is None: + responses = {} + for route in router.routes: + if isinstance(route, APIRoute): + combined_responses = {**responses, **route.responses} + use_response_class = get_value_or_default( + route.response_class, + router.default_response_class, + default_response_class, + self.default_response_class, + ) + current_tags = [] + if tags: + current_tags.extend(tags) + if route.tags: + current_tags.extend(route.tags) + current_dependencies: List[params.Depends] = [] + if dependencies: + current_dependencies.extend(dependencies) + if route.dependencies: + current_dependencies.extend(route.dependencies) + current_callbacks = [] + if callbacks: + current_callbacks.extend(callbacks) + if route.callbacks: + current_callbacks.extend(route.callbacks) + current_generate_unique_id = get_value_or_default( + route.generate_unique_id_function, + router.generate_unique_id_function, + generate_unique_id_function, + self.generate_unique_id_function, + ) + self.add_api_route( + prefix + route.path, + route.endpoint, + response_model=route.response_model, + status_code=route.status_code, + tags=current_tags, + dependencies=current_dependencies, + summary=route.summary, + description=route.description, + response_description=route.response_description, + responses=combined_responses, + deprecated=route.deprecated or deprecated or self.deprecated, + methods=route.methods, + operation_id=route.operation_id, + response_model_include=route.response_model_include, + response_model_exclude=route.response_model_exclude, + response_model_by_alias=route.response_model_by_alias, + response_model_exclude_unset=route.response_model_exclude_unset, + response_model_exclude_defaults=route.response_model_exclude_defaults, + response_model_exclude_none=route.response_model_exclude_none, + include_in_schema=route.include_in_schema + and self.include_in_schema + and include_in_schema, + response_class=use_response_class, + name=route.name, + route_class_override=type(route), + callbacks=current_callbacks, + openapi_extra=route.openapi_extra, + generate_unique_id_function=current_generate_unique_id, + ) + elif isinstance(route, routing.Route): + methods = list(route.methods or []) # type: ignore # in Starlette + self.add_route( + prefix + route.path, + route.endpoint, + methods=methods, + include_in_schema=route.include_in_schema, + name=route.name, + ) + elif isinstance(route, APIWebSocketRoute): + self.add_api_websocket_route( + prefix + route.path, route.endpoint, name=route.name + ) + elif isinstance(route, routing.WebSocketRoute): + self.add_websocket_route( + prefix + route.path, route.endpoint, name=route.name + ) + for handler in router.on_startup: + self.add_event_handler("startup", handler) + for handler in router.on_shutdown: + self.add_event_handler("shutdown", handler) + + 
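# Illustrative usage of include_router above (names assumed): routes are
# copied and re-registered at include time with the combined prefix, tags,
# dependencies and responses, so mutating the child router afterwards does
# not affect routes that were already included:
#
#     users = APIRouter()
#
#     @users.get("/me")
#     async def read_me():
#         return {"user": "me"}
#
#     api = APIRouter()
#     api.include_router(users, prefix="/users", tags=["users"])
#     # -> GET /users/me, tagged "users"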
def get( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["GET"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def put( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + 
methods=["PUT"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def post( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["POST"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def delete( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = 
Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["DELETE"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def options( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["OPTIONS"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def head( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + 
response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["HEAD"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def patch( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["PATCH"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + 
response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def trace( + self, + path: str, + *, + response_model: Optional[Type[Any]] = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["TRACE"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) diff --git a/venv/lib/python3.10/site-packages/fastapi/security/__init__.py b/venv/lib/python3.10/site-packages/fastapi/security/__init__.py new file mode 100644 index 0000000..3aa6bf2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/security/__init__.py @@ -0,0 +1,15 @@ +from .api_key import APIKeyCookie as APIKeyCookie +from .api_key import APIKeyHeader as APIKeyHeader +from .api_key import APIKeyQuery as APIKeyQuery +from .http import HTTPAuthorizationCredentials as HTTPAuthorizationCredentials +from .http import HTTPBasic as HTTPBasic +from .http import HTTPBasicCredentials as HTTPBasicCredentials +from .http import HTTPBearer as HTTPBearer +from .http import HTTPDigest as HTTPDigest +from .oauth2 import OAuth2 as OAuth2 +from .oauth2 import OAuth2AuthorizationCodeBearer as OAuth2AuthorizationCodeBearer +from .oauth2 import OAuth2PasswordBearer as OAuth2PasswordBearer +from .oauth2 import OAuth2PasswordRequestForm as OAuth2PasswordRequestForm +from .oauth2 import OAuth2PasswordRequestFormStrict as OAuth2PasswordRequestFormStrict +from .oauth2 import SecurityScopes as SecurityScopes +from .open_id_connect_url import 
OpenIdConnect as OpenIdConnect diff --git a/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..2e8d099 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/api_key.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/api_key.cpython-310.pyc new file mode 100644 index 0000000..400adb2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/api_key.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000..b62e52d Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/base.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/http.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/http.cpython-310.pyc new file mode 100644 index 0000000..e8f9b02 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/http.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/oauth2.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/oauth2.cpython-310.pyc new file mode 100644 index 0000000..293fc96 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/oauth2.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/open_id_connect_url.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/open_id_connect_url.cpython-310.pyc new file mode 100644 index 0000000..869b6fb Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/open_id_connect_url.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000..13366b9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi/security/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi/security/api_key.py b/venv/lib/python3.10/site-packages/fastapi/security/api_key.py new file mode 100644 index 0000000..36ab60e --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/security/api_key.py @@ -0,0 +1,92 @@ +from typing import Optional + +from fastapi.openapi.models import APIKey, APIKeyIn +from fastapi.security.base import SecurityBase +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.status import HTTP_403_FORBIDDEN + + +class APIKeyBase(SecurityBase): + pass + + +class APIKeyQuery(APIKeyBase): + def __init__( + self, + *, + name: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True + ): + self.model: APIKey = APIKey( + **{"in": APIKeyIn.query}, name=name, description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> 
Optional[str]: + api_key: str = request.query_params.get(self.model.name) + if not api_key: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return api_key + + +class APIKeyHeader(APIKeyBase): + def __init__( + self, + *, + name: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True + ): + self.model: APIKey = APIKey( + **{"in": APIKeyIn.header}, name=name, description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + api_key: str = request.headers.get(self.model.name) + if not api_key: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return api_key + + +class APIKeyCookie(APIKeyBase): + def __init__( + self, + *, + name: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True + ): + self.model: APIKey = APIKey( + **{"in": APIKeyIn.cookie}, name=name, description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + api_key = request.cookies.get(self.model.name) + if not api_key: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return api_key diff --git a/venv/lib/python3.10/site-packages/fastapi/security/base.py b/venv/lib/python3.10/site-packages/fastapi/security/base.py new file mode 100644 index 0000000..c43555d --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/security/base.py @@ -0,0 +1,6 @@ +from fastapi.openapi.models import SecurityBase as SecurityBaseModel + + +class SecurityBase: + model: SecurityBaseModel + scheme_name: str diff --git a/venv/lib/python3.10/site-packages/fastapi/security/http.py b/venv/lib/python3.10/site-packages/fastapi/security/http.py new file mode 100644 index 0000000..1b473c6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/security/http.py @@ -0,0 +1,165 @@ +import binascii +from base64 import b64decode +from typing import Optional + +from fastapi.exceptions import HTTPException +from fastapi.openapi.models import HTTPBase as HTTPBaseModel +from fastapi.openapi.models import HTTPBearer as HTTPBearerModel +from fastapi.security.base import SecurityBase +from fastapi.security.utils import get_authorization_scheme_param +from pydantic import BaseModel +from starlette.requests import Request +from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN + + +class HTTPBasicCredentials(BaseModel): + username: str + password: str + + +class HTTPAuthorizationCredentials(BaseModel): + scheme: str + credentials: str + + +class HTTPBase(SecurityBase): + def __init__( + self, + *, + scheme: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + self.model = HTTPBaseModel(scheme=scheme, description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__( + self, request: Request + ) -> Optional[HTTPAuthorizationCredentials]: + authorization: str = request.headers.get("Authorization") + scheme, credentials = get_authorization_scheme_param(authorization) + if not (authorization and scheme and credentials): + if 
self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) + + +class HTTPBasic(HTTPBase): + def __init__( + self, + *, + scheme_name: Optional[str] = None, + realm: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + self.model = HTTPBaseModel(scheme="basic", description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.realm = realm + self.auto_error = auto_error + + async def __call__( # type: ignore + self, request: Request + ) -> Optional[HTTPBasicCredentials]: + authorization: str = request.headers.get("Authorization") + scheme, param = get_authorization_scheme_param(authorization) + if self.realm: + unauthorized_headers = {"WWW-Authenticate": f'Basic realm="{self.realm}"'} + else: + unauthorized_headers = {"WWW-Authenticate": "Basic"} + invalid_user_credentials_exc = HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Invalid authentication credentials", + headers=unauthorized_headers, + ) + if not authorization or scheme.lower() != "basic": + if self.auto_error: + raise HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers=unauthorized_headers, + ) + else: + return None + try: + data = b64decode(param).decode("ascii") + except (ValueError, UnicodeDecodeError, binascii.Error): + raise invalid_user_credentials_exc + username, separator, password = data.partition(":") + if not separator: + raise invalid_user_credentials_exc + return HTTPBasicCredentials(username=username, password=password) + + +class HTTPBearer(HTTPBase): + def __init__( + self, + *, + bearerFormat: Optional[str] = None, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + self.model = HTTPBearerModel(bearerFormat=bearerFormat, description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__( + self, request: Request + ) -> Optional[HTTPAuthorizationCredentials]: + authorization: str = request.headers.get("Authorization") + scheme, credentials = get_authorization_scheme_param(authorization) + if not (authorization and scheme and credentials): + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + if scheme.lower() != "bearer": + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, + detail="Invalid authentication credentials", + ) + else: + return None + return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) + + +class HTTPDigest(HTTPBase): + def __init__( + self, + *, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + self.model = HTTPBaseModel(scheme="digest", description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__( + self, request: Request + ) -> Optional[HTTPAuthorizationCredentials]: + authorization: str = request.headers.get("Authorization") + scheme, credentials = get_authorization_scheme_param(authorization) + if not (authorization and scheme and credentials): + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + if scheme.lower() != "digest": + raise HTTPException( + 
status_code=HTTP_403_FORBIDDEN, + detail="Invalid authentication credentials", + ) + return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) diff --git a/venv/lib/python3.10/site-packages/fastapi/security/oauth2.py b/venv/lib/python3.10/site-packages/fastapi/security/oauth2.py new file mode 100644 index 0000000..bdc6e2e --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/security/oauth2.py @@ -0,0 +1,220 @@ +from typing import Any, Dict, List, Optional, Union + +from fastapi.exceptions import HTTPException +from fastapi.openapi.models import OAuth2 as OAuth2Model +from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel +from fastapi.param_functions import Form +from fastapi.security.base import SecurityBase +from fastapi.security.utils import get_authorization_scheme_param +from starlette.requests import Request +from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN + + +class OAuth2PasswordRequestForm: + """ + This is a dependency class, use it like: + + @app.post("/login") + def login(form_data: OAuth2PasswordRequestForm = Depends()): + data = form_data.parse() + print(data.username) + print(data.password) + for scope in data.scopes: + print(scope) + if data.client_id: + print(data.client_id) + if data.client_secret: + print(data.client_secret) + return data + + + It creates the following Form request parameters in your endpoint: + + grant_type: the OAuth2 spec says it is required and MUST be the fixed string "password". + Nevertheless, this dependency class is permissive and allows not passing it. If you want to enforce it, + use instead the OAuth2PasswordRequestFormStrict dependency. + username: username string. The OAuth2 spec requires the exact field name "username". + password: password string. The OAuth2 spec requires the exact field name "password". + scope: Optional string. Several scopes (each one a string) separated by spaces. E.g. + "items:read items:write users:read profile openid" + client_id: optional string. OAuth2 recommends sending the client_id and client_secret (if any) + using HTTP Basic auth, as: client_id:client_secret + client_secret: optional string. OAuth2 recommends sending the client_id and client_secret (if any) + using HTTP Basic auth, as: client_id:client_secret + """ + + def __init__( + self, + grant_type: str = Form(None, regex="password"), + username: str = Form(...), + password: str = Form(...), + scope: str = Form(""), + client_id: Optional[str] = Form(None), + client_secret: Optional[str] = Form(None), + ): + self.grant_type = grant_type + self.username = username + self.password = password + self.scopes = scope.split() + self.client_id = client_id + self.client_secret = client_secret + + +class OAuth2PasswordRequestFormStrict(OAuth2PasswordRequestForm): + """ + This is a dependency class, use it like: + + @app.post("/login") + def login(form_data: OAuth2PasswordRequestFormStrict = Depends()): + data = form_data.parse() + print(data.username) + print(data.password) + for scope in data.scopes: + print(scope) + if data.client_id: + print(data.client_id) + if data.client_secret: + print(data.client_secret) + return data + + + It creates the following Form request parameters in your endpoint: + + grant_type: the OAuth2 spec says it is required and MUST be the fixed string "password". + This dependency is strict about it. If you want to be permissive, use instead the + OAuth2PasswordRequestForm dependency class. + username: username string. The OAuth2 spec requires the exact field name "username". 
+ password: password string. The OAuth2 spec requires the exact field name "password". + scope: Optional string. Several scopes (each one a string) separated by spaces. E.g. + "items:read items:write users:read profile openid" + client_id: optional string. OAuth2 recommends sending the client_id and client_secret (if any) + using HTTP Basic auth, as: client_id:client_secret + client_secret: optional string. OAuth2 recommends sending the client_id and client_secret (if any) + using HTTP Basic auth, as: client_id:client_secret + """ + + def __init__( + self, + grant_type: str = Form(..., regex="password"), + username: str = Form(...), + password: str = Form(...), + scope: str = Form(""), + client_id: Optional[str] = Form(None), + client_secret: Optional[str] = Form(None), + ): + super().__init__( + grant_type=grant_type, + username=username, + password=password, + scope=scope, + client_id=client_id, + client_secret=client_secret, + ) + + +class OAuth2(SecurityBase): + def __init__( + self, + *, + flows: Union[OAuthFlowsModel, Dict[str, Dict[str, Any]]] = OAuthFlowsModel(), + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: Optional[bool] = True + ): + self.model = OAuth2Model(flows=flows, description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + authorization: str = request.headers.get("Authorization") + if not authorization: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return authorization + + +class OAuth2PasswordBearer(OAuth2): + def __init__( + self, + tokenUrl: str, + scheme_name: Optional[str] = None, + scopes: Optional[Dict[str, str]] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + if not scopes: + scopes = {} + flows = OAuthFlowsModel(password={"tokenUrl": tokenUrl, "scopes": scopes}) + super().__init__( + flows=flows, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> Optional[str]: + authorization: str = request.headers.get("Authorization") + scheme, param = get_authorization_scheme_param(authorization) + if not authorization or scheme.lower() != "bearer": + if self.auto_error: + raise HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers={"WWW-Authenticate": "Bearer"}, + ) + else: + return None + return param + + +class OAuth2AuthorizationCodeBearer(OAuth2): + def __init__( + self, + authorizationUrl: str, + tokenUrl: str, + refreshUrl: Optional[str] = None, + scheme_name: Optional[str] = None, + scopes: Optional[Dict[str, str]] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + if not scopes: + scopes = {} + flows = OAuthFlowsModel( + authorizationCode={ + "authorizationUrl": authorizationUrl, + "tokenUrl": tokenUrl, + "refreshUrl": refreshUrl, + "scopes": scopes, + } + ) + super().__init__( + flows=flows, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> Optional[str]: + authorization: str = request.headers.get("Authorization") + scheme, param = get_authorization_scheme_param(authorization) + if not authorization or scheme.lower() != "bearer": + if self.auto_error: + raise HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + 
headers={"WWW-Authenticate": "Bearer"}, + ) + else: + return None # pragma: nocover + return param + + +class SecurityScopes: + def __init__(self, scopes: Optional[List[str]] = None): + self.scopes = scopes or [] + self.scope_str = " ".join(self.scopes) diff --git a/venv/lib/python3.10/site-packages/fastapi/security/open_id_connect_url.py b/venv/lib/python3.10/site-packages/fastapi/security/open_id_connect_url.py new file mode 100644 index 0000000..dfe9f7b --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/security/open_id_connect_url.py @@ -0,0 +1,34 @@ +from typing import Optional + +from fastapi.openapi.models import OpenIdConnect as OpenIdConnectModel +from fastapi.security.base import SecurityBase +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.status import HTTP_403_FORBIDDEN + + +class OpenIdConnect(SecurityBase): + def __init__( + self, + *, + openIdConnectUrl: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True + ): + self.model = OpenIdConnectModel( + openIdConnectUrl=openIdConnectUrl, description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + authorization: str = request.headers.get("Authorization") + if not authorization: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return authorization diff --git a/venv/lib/python3.10/site-packages/fastapi/security/utils.py b/venv/lib/python3.10/site-packages/fastapi/security/utils.py new file mode 100644 index 0000000..2da0dd2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/security/utils.py @@ -0,0 +1,8 @@ +from typing import Tuple + + +def get_authorization_scheme_param(authorization_header_value: str) -> Tuple[str, str]: + if not authorization_header_value: + return "", "" + scheme, _, param = authorization_header_value.partition(" ") + return scheme, param diff --git a/venv/lib/python3.10/site-packages/fastapi/staticfiles.py b/venv/lib/python3.10/site-packages/fastapi/staticfiles.py new file mode 100644 index 0000000..299015d --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/staticfiles.py @@ -0,0 +1 @@ +from starlette.staticfiles import StaticFiles as StaticFiles # noqa diff --git a/venv/lib/python3.10/site-packages/fastapi/templating.py b/venv/lib/python3.10/site-packages/fastapi/templating.py new file mode 100644 index 0000000..0cb8684 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/templating.py @@ -0,0 +1 @@ +from starlette.templating import Jinja2Templates as Jinja2Templates # noqa diff --git a/venv/lib/python3.10/site-packages/fastapi/testclient.py b/venv/lib/python3.10/site-packages/fastapi/testclient.py new file mode 100644 index 0000000..4012406 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/testclient.py @@ -0,0 +1 @@ +from starlette.testclient import TestClient as TestClient # noqa diff --git a/venv/lib/python3.10/site-packages/fastapi/types.py b/venv/lib/python3.10/site-packages/fastapi/types.py new file mode 100644 index 0000000..e0bca46 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/types.py @@ -0,0 +1,3 @@ +from typing import Any, Callable, TypeVar + +DecoratedCallable = TypeVar("DecoratedCallable", bound=Callable[..., Any]) diff --git a/venv/lib/python3.10/site-packages/fastapi/utils.py 
b/venv/lib/python3.10/site-packages/fastapi/utils.py new file mode 100644 index 0000000..b930149 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/utils.py @@ -0,0 +1,176 @@ +import functools +import re +import warnings +from dataclasses import is_dataclass +from enum import Enum +from typing import TYPE_CHECKING, Any, Dict, Optional, Set, Type, Union, cast + +import fastapi +from fastapi.datastructures import DefaultPlaceholder, DefaultType +from fastapi.openapi.constants import REF_PREFIX +from pydantic import BaseConfig, BaseModel, create_model +from pydantic.class_validators import Validator +from pydantic.fields import FieldInfo, ModelField, UndefinedType +from pydantic.schema import model_process_schema +from pydantic.utils import lenient_issubclass + +if TYPE_CHECKING: # pragma: nocover + from .routing import APIRoute + + +def get_model_definitions( + *, + flat_models: Set[Union[Type[BaseModel], Type[Enum]]], + model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str], +) -> Dict[str, Any]: + definitions: Dict[str, Dict[str, Any]] = {} + for model in flat_models: + m_schema, m_definitions, m_nested_models = model_process_schema( + model, model_name_map=model_name_map, ref_prefix=REF_PREFIX + ) + definitions.update(m_definitions) + model_name = model_name_map[model] + definitions[model_name] = m_schema + return definitions + + +def get_path_param_names(path: str) -> Set[str]: + return set(re.findall("{(.*?)}", path)) + + +def create_response_field( + name: str, + type_: Type[Any], + class_validators: Optional[Dict[str, Validator]] = None, + default: Optional[Any] = None, + required: Union[bool, UndefinedType] = False, + model_config: Type[BaseConfig] = BaseConfig, + field_info: Optional[FieldInfo] = None, + alias: Optional[str] = None, +) -> ModelField: + """ + Create a new response field. Raises if type_ is invalid. + """ + class_validators = class_validators or {} + field_info = field_info or FieldInfo(None) + + response_field = functools.partial( + ModelField, + name=name, + type_=type_, + class_validators=class_validators, + default=default, + required=required, + model_config=model_config, + alias=alias, + ) + + try: + return response_field(field_info=field_info) + except RuntimeError: + raise fastapi.exceptions.FastAPIError( + f"Invalid args for response field! 
Hint: check that {type_} is a valid pydantic field type" + ) + + +def create_cloned_field( + field: ModelField, + *, + cloned_types: Optional[Dict[Type[BaseModel], Type[BaseModel]]] = None, +) -> ModelField: + # _cloned_types has already cloned types, to support recursive models + if cloned_types is None: + cloned_types = dict() + original_type = field.type_ + if is_dataclass(original_type) and hasattr(original_type, "__pydantic_model__"): + original_type = original_type.__pydantic_model__ + use_type = original_type + if lenient_issubclass(original_type, BaseModel): + original_type = cast(Type[BaseModel], original_type) + use_type = cloned_types.get(original_type) + if use_type is None: + use_type = create_model(original_type.__name__, __base__=original_type) + cloned_types[original_type] = use_type + for f in original_type.__fields__.values(): + use_type.__fields__[f.name] = create_cloned_field( + f, cloned_types=cloned_types + ) + new_field = create_response_field(name=field.name, type_=use_type) + new_field.has_alias = field.has_alias + new_field.alias = field.alias + new_field.class_validators = field.class_validators + new_field.default = field.default + new_field.required = field.required + new_field.model_config = field.model_config + new_field.field_info = field.field_info + new_field.allow_none = field.allow_none + new_field.validate_always = field.validate_always + if field.sub_fields: + new_field.sub_fields = [ + create_cloned_field(sub_field, cloned_types=cloned_types) + for sub_field in field.sub_fields + ] + if field.key_field: + new_field.key_field = create_cloned_field( + field.key_field, cloned_types=cloned_types + ) + new_field.validators = field.validators + new_field.pre_validators = field.pre_validators + new_field.post_validators = field.post_validators + new_field.parse_json = field.parse_json + new_field.shape = field.shape + new_field.populate_validators() + return new_field + + +def generate_operation_id_for_path( + *, name: str, path: str, method: str +) -> str: # pragma: nocover + warnings.warn( + "fastapi.utils.generate_operation_id_for_path() was deprecated, " + "it is not used internally, and will be removed soon", + DeprecationWarning, + stacklevel=2, + ) + operation_id = name + path + operation_id = re.sub("[^0-9a-zA-Z_]", "_", operation_id) + operation_id = operation_id + "_" + method.lower() + return operation_id + + +def generate_unique_id(route: "APIRoute") -> str: + operation_id = route.name + route.path_format + operation_id = re.sub("[^0-9a-zA-Z_]", "_", operation_id) + assert route.methods + operation_id = operation_id + "_" + list(route.methods)[0].lower() + return operation_id + + +def deep_dict_update(main_dict: Dict[Any, Any], update_dict: Dict[Any, Any]) -> None: + for key in update_dict: + if ( + key in main_dict + and isinstance(main_dict[key], dict) + and isinstance(update_dict[key], dict) + ): + deep_dict_update(main_dict[key], update_dict[key]) + else: + main_dict[key] = update_dict[key] + + +def get_value_or_default( + first_item: Union[DefaultPlaceholder, DefaultType], + *extra_items: Union[DefaultPlaceholder, DefaultType], +) -> Union[DefaultPlaceholder, DefaultType]: + """ + Pass items or `DefaultPlaceholder`s by descending priority. + + The first one to _not_ be a `DefaultPlaceholder` will be returned. + + Otherwise, the first item (a `DefaultPlaceholder`) will be returned. 
+ """ + items = (first_item,) + extra_items + for item in items: + if not isinstance(item, DefaultPlaceholder): + return item + return first_item diff --git a/venv/lib/python3.10/site-packages/fastapi/websockets.py b/venv/lib/python3.10/site-packages/fastapi/websockets.py new file mode 100644 index 0000000..bed672a --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi/websockets.py @@ -0,0 +1,2 @@ +from starlette.websockets import WebSocket as WebSocket # noqa +from starlette.websockets import WebSocketDisconnect as WebSocketDisconnect # noqa diff --git a/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/LICENSE b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/LICENSE new file mode 100644 index 0000000..7082a2d --- /dev/null +++ b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2013-2019 Nikolay Kim and Andrew Svetlov + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
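
Before the vendored package metadata that follows, a brief orientation on the fastapi.security modules above: APIKeyQuery/APIKeyHeader/APIKeyCookie, HTTPBasic/HTTPBearer/HTTPDigest, and the OAuth2 classes are all dependency callables. Declared with Depends(), they extract the credential from the request (returning None or raising 401/403 depending on auto_error) and, because they subclass SecurityBase with an OpenAPI model, also register the scheme in the generated docs. A minimal usage sketch; the endpoint paths and header name are illustrative, not taken from this repository:

from typing import Optional

from fastapi import Depends, FastAPI
from fastapi.security.api_key import APIKeyHeader
from fastapi.security.http import HTTPAuthorizationCredentials, HTTPBearer

app = FastAPI()

# auto_error=False: a missing key yields None instead of an immediate 403,
# leaving the "unauthenticated" policy to the endpoint itself.
api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)  # header name is illustrative
# auto_error=True (the default): a missing or non-Bearer Authorization header raises 403.
bearer_scheme = HTTPBearer()

@app.get("/public")  # illustrative endpoint
async def public(api_key: Optional[str] = Depends(api_key_header)):
    return {"authenticated": api_key is not None}

@app.get("/private")  # illustrative endpoint
async def private(creds: HTTPAuthorizationCredentials = Depends(bearer_scheme)):
    # creds.scheme is "Bearer"; creds.credentials is the raw token string
    return {"scheme": creds.scheme}

OAuth2PasswordBearer works the same way but returns just the token extracted from the Authorization header, and on failure raises 401 with a WWW-Authenticate header, following the OAuth2 spec more closely.
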
diff --git a/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/METADATA b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/METADATA
new file mode 100644
index 0000000..235aca1
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/METADATA
@@ -0,0 +1,150 @@
+Metadata-Version: 2.1
+Name: frozenlist
+Version: 1.3.3
+Summary: A list-like structure which implements collections.abc.MutableSequence
+Home-page: https://github.com/aio-libs/frozenlist
+Maintainer: aiohttp team
+Maintainer-email: team@aiohttp.org
+License: Apache 2
+Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
+Project-URL: CI: Github Actions, https://github.com/aio-libs/frozenlist/actions
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/frozenlist
+Project-URL: Docs: RTD, https://frozenlist.readthedocs.io
+Project-URL: GitHub: issues, https://github.com/aio-libs/frozenlist/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/frozenlist
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+
+==========
+frozenlist
+==========
+
+.. image:: https://github.com/aio-libs/frozenlist/workflows/CI/badge.svg
+   :target: https://github.com/aio-libs/frozenlist/actions
+   :alt: GitHub status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/frozenlist/branch/master/graph/badge.svg
+   :target: https://codecov.io/gh/aio-libs/frozenlist
+   :alt: codecov.io status for master branch
+
+.. image:: https://badge.fury.io/py/frozenlist.svg
+   :target: https://pypi.org/project/frozenlist
+   :alt: Latest PyPI package version
+
+.. image:: https://readthedocs.org/projects/frozenlist/badge/?version=latest
+   :target: https://frozenlist.readthedocs.io/
+   :alt: Latest Read The Docs
+
+.. image:: https://img.shields.io/discourse/topics?server=https%3A%2F%2Faio-libs.discourse.group%2F
+   :target: https://aio-libs.discourse.group/
+   :alt: Discourse group for aio-libs
+
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+   :target: https://gitter.im/aio-libs/Lobby
+   :alt: Chat on Gitter
+
+Introduction
+============
+
+``frozenlist.FrozenList`` is a list-like structure which implements
+``collections.abc.MutableSequence``. 
The list is *mutable* until ``FrozenList.freeze``
+is called, after which list modifications raise ``RuntimeError``:
+
+
+>>> from frozenlist import FrozenList
+>>> fl = FrozenList([17, 42])
+>>> fl.append('spam')
+>>> fl.append('Vikings')
+>>> fl
+<FrozenList(frozen=False, [17, 42, 'spam', 'Vikings'])>
+>>> fl.freeze()
+>>> fl
+<FrozenList(frozen=True, [17, 42, 'spam', 'Vikings'])>
+>>> fl.frozen
+True
+>>> fl.append("Monty")
+Traceback (most recent call last):
+  File "<stdin>", line 1, in <module>
+  File "frozenlist/_frozenlist.pyx", line 97, in frozenlist._frozenlist.FrozenList.append
+    self._check_frozen()
+  File "frozenlist/_frozenlist.pyx", line 19, in frozenlist._frozenlist.FrozenList._check_frozen
+    raise RuntimeError("Cannot modify frozen list.")
+RuntimeError: Cannot modify frozen list.
+
+
+FrozenList is also hashable, but only when frozen. Otherwise it also throws a RuntimeError:
+
+
+>>> fl = FrozenList([17, 42, 'spam'])
+>>> hash(fl)
+Traceback (most recent call last):
+  File "<stdin>", line 1, in <module>
+  File "frozenlist/_frozenlist.pyx", line 111, in frozenlist._frozenlist.FrozenList.__hash__
+    raise RuntimeError("Cannot hash unfrozen list.")
+RuntimeError: Cannot hash unfrozen list.
+>>> fl.freeze()
+>>> hash(fl)
+3713081631934410656
+>>> dictionary = {fl: 'Vikings'} # frozen fl can be a dict key
+>>> dictionary
+{<FrozenList(frozen=True, [17, 42, 'spam'])>: 'Vikings'}
+
+
+Installation
+------------
+
+::
+
+   $ pip install frozenlist
+
+The library requires Python 3.6 or newer.
+
+
+Documentation
+=============
+
+https://frozenlist.readthedocs.io/
+
+Communication channels
+======================
+
+*aio-libs discourse group*: https://aio-libs.discourse.group
+
+Feel free to post your questions and ideas here.
+
+*gitter chat* https://gitter.im/aio-libs/Lobby
+
+Requirements
+============
+
+- Python >= 3.6
+
+License
+=======
+
+``frozenlist`` is offered under the Apache 2 license.
+
+Source code
+===========
+
+The project is hosted on GitHub_
+
+Please file an issue in the `bug tracker
+<https://github.com/aio-libs/frozenlist/issues>`_ if you have found a bug
+or have some suggestions to improve the library.
+
+.. 
_GitHub: https://github.com/aio-libs/frozenlist diff --git a/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/RECORD b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/RECORD new file mode 100644 index 0000000..188bb94 --- /dev/null +++ b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/RECORD @@ -0,0 +1,12 @@ +frozenlist-1.3.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +frozenlist-1.3.3.dist-info/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332 +frozenlist-1.3.3.dist-info/METADATA,sha256=f8FI9mzqEFVV4IHXvWrRk8BuLIAwRWGJuYewaEidVQg,4693 +frozenlist-1.3.3.dist-info/RECORD,, +frozenlist-1.3.3.dist-info/WHEEL,sha256=rBYGOuaECaIr_qy4ezdKiID7sgQrj9GER0C0R3D19PY,225 +frozenlist-1.3.3.dist-info/top_level.txt,sha256=jivtxsPXA3nK3WBWW2LW5Mtu_GHt8UZA13NeCs2cKuA,11 +frozenlist/__init__.py,sha256=yf1hVJQUEyUwzTJau3i3SQeFDYYROwgNpJLMzHN58tM,2323 +frozenlist/__init__.pyi,sha256=vMEoES1xGegPtVXoCi9XydEeHsyuIq-KdeXwP5PdsaA,1470 +frozenlist/__pycache__/__init__.cpython-310.pyc,, +frozenlist/_frozenlist.cpython-310-x86_64-linux-gnu.so,sha256=wQWFY2G1TlbmuOHDMRDEzy0I0AnkgYKhiY0eAGraWRg,461304 +frozenlist/_frozenlist.pyx,sha256=9V4Z1En6TZwgFD26d-sjxyhUzUm338H1Qiz4-i5ukv0,2983 +frozenlist/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 diff --git a/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/WHEEL b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/WHEEL new file mode 100644 index 0000000..ec97f6a --- /dev/null +++ b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/WHEEL @@ -0,0 +1,8 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.2) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_5_x86_64 +Tag: cp310-cp310-manylinux1_x86_64 +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/top_level.txt new file mode 100644 index 0000000..52f13fc --- /dev/null +++ b/venv/lib/python3.10/site-packages/frozenlist-1.3.3.dist-info/top_level.txt @@ -0,0 +1 @@ +frozenlist diff --git a/venv/lib/python3.10/site-packages/frozenlist/__init__.py b/venv/lib/python3.10/site-packages/frozenlist/__init__.py new file mode 100644 index 0000000..4f172e7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/frozenlist/__init__.py @@ -0,0 +1,96 @@ +import os +import sys +import types +from collections.abc import MutableSequence +from functools import total_ordering +from typing import Tuple, Type + +__version__ = "1.3.3" + +__all__ = ("FrozenList", "PyFrozenList") # type: Tuple[str, ...] 
+
+
+NO_EXTENSIONS = bool(os.environ.get("FROZENLIST_NO_EXTENSIONS"))  # type: bool
+
+
+@total_ordering
+class FrozenList(MutableSequence):
+
+    __slots__ = ("_frozen", "_items")
+
+    if sys.version_info >= (3, 9):
+        __class_getitem__ = classmethod(types.GenericAlias)
+    else:
+
+        @classmethod
+        def __class_getitem__(cls: Type["FrozenList"]) -> Type["FrozenList"]:
+            return cls
+
+    def __init__(self, items=None):
+        self._frozen = False
+        if items is not None:
+            items = list(items)
+        else:
+            items = []
+        self._items = items
+
+    @property
+    def frozen(self):
+        return self._frozen
+
+    def freeze(self):
+        self._frozen = True
+
+    def __getitem__(self, index):
+        return self._items[index]
+
+    def __setitem__(self, index, value):
+        if self._frozen:
+            raise RuntimeError("Cannot modify frozen list.")
+        self._items[index] = value
+
+    def __delitem__(self, index):
+        if self._frozen:
+            raise RuntimeError("Cannot modify frozen list.")
+        del self._items[index]
+
+    def __len__(self):
+        return self._items.__len__()
+
+    def __iter__(self):
+        return self._items.__iter__()
+
+    def __reversed__(self):
+        return self._items.__reversed__()
+
+    def __eq__(self, other):
+        return list(self) == other
+
+    def __le__(self, other):
+        return list(self) <= other
+
+    def insert(self, pos, item):
+        if self._frozen:
+            raise RuntimeError("Cannot modify frozen list.")
+        self._items.insert(pos, item)
+
+    def __repr__(self):
+        return f"<FrozenList(frozen={self._frozen}, {self._items})>"
+
+    def __hash__(self):
+        if self._frozen:
+            return hash(tuple(self))
+        else:
+            raise RuntimeError("Cannot hash unfrozen list.")
+
+
+PyFrozenList = FrozenList
+
+
+try:
+    from ._frozenlist import FrozenList as CFrozenList  # type: ignore
+
+    if not NO_EXTENSIONS:  # pragma: no cover
+        FrozenList = CFrozenList  # type: ignore
+except ImportError:  # pragma: no cover
+    pass
diff --git a/venv/lib/python3.10/site-packages/frozenlist/__init__.pyi b/venv/lib/python3.10/site-packages/frozenlist/__init__.pyi
new file mode 100644
index 0000000..ae803ef
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/frozenlist/__init__.pyi
@@ -0,0 +1,47 @@
+from typing import (
+    Generic,
+    Iterable,
+    Iterator,
+    List,
+    MutableSequence,
+    Optional,
+    TypeVar,
+    Union,
+    overload,
+)
+
+_T = TypeVar("_T")
+_Arg = Union[List[_T], Iterable[_T]]
+
+class FrozenList(MutableSequence[_T], Generic[_T]):
+    def __init__(self, items: Optional[_Arg[_T]] = None) -> None: ...
+    @property
+    def frozen(self) -> bool: ...
+    def freeze(self) -> None: ...
+    @overload
+    def __getitem__(self, i: int) -> _T: ...
+    @overload
+    def __getitem__(self, s: slice) -> FrozenList[_T]: ...
+    @overload
+    def __setitem__(self, i: int, o: _T) -> None: ...
+    @overload
+    def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
+    @overload
+    def __delitem__(self, i: int) -> None: ...
+    @overload
+    def __delitem__(self, i: slice) -> None: ...
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def __reversed__(self) -> Iterator[_T]: ...
+    def __eq__(self, other: object) -> bool: ...
+    def __le__(self, other: FrozenList[_T]) -> bool: ...
+    def __ne__(self, other: object) -> bool: ...
+    def __lt__(self, other: FrozenList[_T]) -> bool: ...
+    def __ge__(self, other: FrozenList[_T]) -> bool: ...
+    def __gt__(self, other: FrozenList[_T]) -> bool: ...
+    def insert(self, pos: int, item: _T) -> None: ...
+    def __repr__(self) -> str: ...
+    def __hash__(self) -> int: ... 
+
+# types for C accelerators are the same
+CFrozenList = PyFrozenList = FrozenList
diff --git a/venv/lib/python3.10/site-packages/frozenlist/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/frozenlist/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..20ab3cd
Binary files /dev/null and b/venv/lib/python3.10/site-packages/frozenlist/__pycache__/__init__.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/frozenlist/_frozenlist.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/frozenlist/_frozenlist.cpython-310-x86_64-linux-gnu.so
new file mode 100755
index 0000000..f0fb87e
Binary files /dev/null and b/venv/lib/python3.10/site-packages/frozenlist/_frozenlist.cpython-310-x86_64-linux-gnu.so differ
diff --git a/venv/lib/python3.10/site-packages/frozenlist/_frozenlist.pyx b/venv/lib/python3.10/site-packages/frozenlist/_frozenlist.pyx
new file mode 100644
index 0000000..9ee846c
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/frozenlist/_frozenlist.pyx
@@ -0,0 +1,123 @@
+import sys
+import types
+from collections.abc import MutableSequence
+
+
+cdef class FrozenList:
+
+    if sys.version_info >= (3, 9):
+        __class_getitem__ = classmethod(types.GenericAlias)
+    else:
+        @classmethod
+        def __class_getitem__(cls):
+            return cls
+
+    cdef readonly bint frozen
+    cdef list _items
+
+    def __init__(self, items=None):
+        self.frozen = False
+        if items is not None:
+            items = list(items)
+        else:
+            items = []
+        self._items = items
+
+    cdef object _check_frozen(self):
+        if self.frozen:
+            raise RuntimeError("Cannot modify frozen list.")
+
+    cdef inline object _fast_len(self):
+        return len(self._items)
+
+    def freeze(self):
+        self.frozen = True
+
+    def __getitem__(self, index):
+        return self._items[index]
+
+    def __setitem__(self, index, value):
+        self._check_frozen()
+        self._items[index] = value
+
+    def __delitem__(self, index):
+        self._check_frozen()
+        del self._items[index]
+
+    def __len__(self):
+        return self._fast_len()
+
+    def __iter__(self):
+        return self._items.__iter__()
+
+    def __reversed__(self):
+        return self._items.__reversed__()
+
+    def __richcmp__(self, other, op):
+        if op == 0:  # <
+            return list(self) < other
+        if op == 1:  # <=
+            return list(self) <= other
+        if op == 2:  # ==
+            return list(self) == other
+        if op == 3:  # !=
+            return list(self) != other
+        if op == 4:  # >
+            return list(self) > other
+        if op == 5:  # >=
+            return list(self) >= other
+
+    def insert(self, pos, item):
+        self._check_frozen()
+        self._items.insert(pos, item)
+
+    def __contains__(self, item):
+        return item in self._items
+
+    def __iadd__(self, items):
+        self._check_frozen()
+        self._items += list(items)
+        return self
+
+    def index(self, item):
+        return self._items.index(item)
+
+    def remove(self, item):
+        self._check_frozen()
+        self._items.remove(item)
+
+    def clear(self):
+        self._check_frozen()
+        self._items.clear()
+
+    def extend(self, items):
+        self._check_frozen()
+        self._items += list(items)
+
+    def reverse(self):
+        self._check_frozen()
+        self._items.reverse()
+
+    def pop(self, index=-1):
+        self._check_frozen()
+        return self._items.pop(index)
+
+    def append(self, item):
+        self._check_frozen()
+        return self._items.append(item)
+
+    def count(self, item):
+        return self._items.count(item)
+
+    def __repr__(self):
+        return '<FrozenList(frozen={}, {!r})>'.format(self.frozen,
+                                                      self._items)
+
+    def __hash__(self):
+        if self.frozen:
+            return hash(tuple(self._items))
+        else:
+            raise RuntimeError("Cannot hash unfrozen list.")
+
+
+MutableSequence.register(FrozenList) diff --git 
a/venv/lib/python3.10/site-packages/frozenlist/py.typed b/venv/lib/python3.10/site-packages/frozenlist/py.typed new file mode 100644 index 0000000..f5642f7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/frozenlist/py.typed @@ -0,0 +1 @@ +Marker diff --git a/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/AUTHORS b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/AUTHORS new file mode 100644 index 0000000..42a5c22 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/AUTHORS @@ -0,0 +1,51 @@ +Original Authors +---------------- +* Armin Rigo +* Christian Tismer + +Contributors +------------ +* Al Stone +* Alexander Schmidt +* Alexey Borzenkov +* Andreas Schwab +* Armin Ronacher +* Bin Wang +* Bob Ippolito +* ChangBo Guo +* Christoph Gohlke +* Denis Bilenko +* Dirk Mueller +* Donovan Preston +* Fantix King +* Floris Bruynooghe +* Fredrik Fornwall +* Gerd Woetzel +* Giel van Schijndel +* Gökhan Karabulut +* Gustavo Niemeyer +* Guy Rozendorn +* Hye-Shik Chang +* Jared Kuolt +* Jason Madden +* Josh Snyder +* Kyle Ambroff +* Laszlo Boszormenyi +* Mao Han +* Marc Abramowitz +* Marc Schlaich +* Marcin Bachry +* Matt Madison +* Matt Turner +* Michael Ellerman +* Michael Matz +* Ralf Schmitt +* Robie Basak +* Ronny Pfannschmidt +* Samual M. Rushing +* Tony Bowles +* Tony Breeds +* Trevor Bowen +* Tulio Magno Quites Machado Filho +* Ulrich Weigand +* Victor Stinner diff --git a/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/LICENSE b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/LICENSE new file mode 100644 index 0000000..b73a4a1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/LICENSE @@ -0,0 +1,30 @@ +The following files are derived from Stackless Python and are subject to the +same license as Stackless Python: + + src/greenlet/slp_platformselect.h + files in src/greenlet/platform/ directory + +See LICENSE.PSF and http://www.stackless.com/ for details. + +Unless otherwise noted, the files in greenlet have been released under the +following MIT license: + +Copyright (c) Armin Rigo, Christian Tismer and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
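
To tie the frozenlist sources above together: FrozenList behaves like a plain list until freeze() is called; afterwards every mutator raises RuntimeError and the object becomes hashable, so it can serve as a dict key. A small sketch distilled from the README and __init__.py shown above (variable names are illustrative):

from frozenlist import FrozenList

fl = FrozenList([17, 42])
fl.append("spam")      # mutation is allowed while unfrozen
assert fl == [17, 42, "spam"] and not fl.frozen

fl.freeze()            # one-way switch; there is no unfreeze()
assert fl.frozen

try:
    fl.append("Vikings")
except RuntimeError as exc:
    assert str(exc) == "Cannot modify frozen list."

cache = {fl: "value"}  # hashing works only once frozen
assert cache[fl] == "value"

Note that the import transparently picks the Cython accelerator: the pure-Python class is kept as PyFrozenList, and FrozenList is rebound to the compiled _frozenlist implementation unless FROZENLIST_NO_EXTENSIONS is set in the environment.
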
diff --git a/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/LICENSE.PSF b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/LICENSE.PSF new file mode 100644 index 0000000..d3b509a --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/LICENSE.PSF @@ -0,0 +1,47 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011 Python Software Foundation; All Rights Reserved" are retained in Python +alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. 
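
The greenlet metadata that follows describes greenlets as micro-threads with no implicit scheduling: nothing runs concurrently on its own, control moves only when a greenlet explicitly calls switch(). A minimal sketch of that model (function and variable names are illustrative); when a greenlet's function returns, execution resumes in its parent, which here is the main greenlet:

from greenlet import greenlet

def ping():
    print("ping")
    gr_pong.switch()   # explicitly hand control to the other greenlet
    print("ping again")

def pong():
    print("pong")
    gr_ping.switch()   # hand control back; pong stays suspended here forever

gr_ping = greenlet(ping)
gr_pong = greenlet(pong)
gr_ping.switch()       # prints: ping, pong, ping again; then returns to main
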
diff --git a/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/METADATA b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/METADATA
new file mode 100644
index 0000000..5e63425
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/METADATA
@@ -0,0 +1,104 @@
+Metadata-Version: 2.1
+Name: greenlet
+Version: 1.1.2
+Summary: Lightweight in-process concurrent programming
+Home-page: https://greenlet.readthedocs.io/
+Author: Alexey Borzenkov
+Author-email: snaury@gmail.com
+Maintainer: Jason Madden
+Maintainer-email: jason@nextthought.com
+License: MIT License
+Project-URL: Bug Tracker, https://github.com/python-greenlet/greenlet/issues
+Project-URL: Source Code, https://github.com/python-greenlet/greenlet/
+Project-URL: Documentation, https://greenlet.readthedocs.io/
+Keywords: greenlet coroutine concurrency threads cooperative
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Natural Language :: English
+Classifier: Programming Language :: C
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+License-File: LICENSE.PSF
+License-File: AUTHORS
+Provides-Extra: docs
+Requires-Dist: Sphinx ; extra == 'docs'
+Provides-Extra: test
+
+.. This file is included into docs/history.rst
+
+.. image:: https://github.com/python-greenlet/greenlet/workflows/tests/badge.svg
+   :target: https://github.com/python-greenlet/greenlet/actions
+
+Greenlets are lightweight coroutines for in-process concurrent
+programming.
+
+The "greenlet" package is a spin-off of `Stackless`_, a version of
+CPython that supports micro-threads called "tasklets". Tasklets run
+pseudo-concurrently (typically in a single or a few OS-level threads)
+and are synchronized with data exchanges on "channels".
+
+A "greenlet", on the other hand, is a still more primitive notion of
+micro-thread with no implicit scheduling; coroutines, in other words.
+This is useful when you want to control exactly when your code runs.
+You can build custom scheduled micro-threads on top of greenlet;
+however, it seems that greenlets are useful on their own as a way to
+make advanced control flow structures. For example, we can recreate
+generators; the difference with Python's own generators is that our
+generators can call nested functions and the nested functions can
+yield values too. (Additionally, you don't need a "yield" keyword. See
+the example in `test_generator.py
+<https://github.com/python-greenlet/greenlet/blob/master/src/greenlet/tests/test_generator.py>`_).
+
+Greenlets are provided as a C extension module for the regular unmodified
+interpreter.
+
+.. _`Stackless`: http://www.stackless.com
+
+
+Who is using Greenlet? 
+======================
+
+There are several libraries that use Greenlet as a more flexible
+alternative to Python's built in coroutine support:
+
+ - `Concurrence`_
+ - `Eventlet`_
+ - `Gevent`_
+
+.. _Concurrence: http://opensource.hyves.org/concurrence/
+.. _Eventlet: http://eventlet.net/
+.. _Gevent: http://www.gevent.org/
+
+Getting Greenlet
+================
+
+The easiest way to get Greenlet is to install it with pip::
+
+    pip install greenlet
+
+
+Source code archives and binary distributions are available on the
+python package index at https://pypi.org/project/greenlet
+
+The source code repository is hosted on github:
+https://github.com/python-greenlet/greenlet
+
+Documentation is available on readthedocs.org:
+https://greenlet.readthedocs.io
+
+
diff --git a/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/RECORD b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/RECORD
new file mode 100644
index 0000000..b90e6fb
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/RECORD
@@ -0,0 +1,71 @@
+../../../include/site/python3.10/greenlet/greenlet.h,sha256=63uaNbRd8ebE-dysD_SY2GwqbdRam2qSeSPfnaUNn6E,4245
+greenlet-1.1.2.dist-info/AUTHORS,sha256=swW28t2knVRxRkaEQNZtO7MP9Sgnompb7B6cNgJM8Gk,849
+greenlet-1.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+greenlet-1.1.2.dist-info/LICENSE,sha256=dpgx1uXfrywggC-sz_H6-0wgJd2PYlPfpH_K1Z1NCXk,1434
+greenlet-1.1.2.dist-info/LICENSE.PSF,sha256=5f88I8EQ5JTNfXNsEP2W1GJFe6_soxCEDbZScpjH1Gs,2424
+greenlet-1.1.2.dist-info/METADATA,sha256=S4WiBvPsmvRVJ-q1z6_ZDdJdUzx1Vp6_N65Aq2hZqqQ,3881
+greenlet-1.1.2.dist-info/RECORD,,
+greenlet-1.1.2.dist-info/WHEEL,sha256=6B0vZ-Dd34LpGZN_4Y_GbBSl1fSb7keGXRzuHUFcOBA,152
+greenlet-1.1.2.dist-info/top_level.txt,sha256=YSnRsCRoO61JGlP57o8iKL6rdLWDWuiyKD8ekpWUsDc,9
+greenlet/__init__.py,sha256=ACsxf4vNBr4ZkgU2z11nJpRkGFZ3Lj_K_sDECUGSYtw,1323
+greenlet/__pycache__/__init__.cpython-310.pyc,,
+greenlet/_greenlet.cpython-310-x86_64-linux-gnu.so,sha256=O9pfZKJn5g5i6H_xQt4fMsMf0LbbuY52SsQxX8rbOy0,133144
+greenlet/greenlet.c,sha256=lPwBXTSuKlXHjrBVKTNYUy6VMYLXO_8Jq32RyYrVlC8,65996
+greenlet/greenlet.h,sha256=63uaNbRd8ebE-dysD_SY2GwqbdRam2qSeSPfnaUNn6E,4245
+greenlet/platform/setup_switch_x64_masm.cmd,sha256=ZpClUJeU0ujEPSTWNSepP0W2f9XiYQKA8QKSoVou8EU,143
+greenlet/platform/switch_aarch64_gcc.h,sha256=TRH22e9TNRA_mys8hhLbNwz3efZk7BtKZhyhK7ucgyM,2385
+greenlet/platform/switch_alpha_unix.h,sha256=T6kOBiHy3hLmy1vrmFrxbnOnRu0EJkoG_yuWy7fykZ4,689
+greenlet/platform/switch_amd64_unix.h,sha256=KWB4PB2wcAaWvWbMzcq8tYBe02vEGPBCRMnHnfeI7gE,2610
+greenlet/platform/switch_arm32_gcc.h,sha256=wflI2cGZBfLzM_GGgYx3OrFeoOq7OTsJP53dKLsrxS0,2488
+greenlet/platform/switch_arm32_ios.h,sha256=yQZXCa0AZbyAIS9tKceyTCrRYlihpFBKDbiPCn_3im0,1901
+greenlet/platform/switch_csky_gcc.h,sha256=GHlaVXrzQuSkrDqgL7-Ji9YwZnprpFhjPznNyp0NnvU,1340
+greenlet/platform/switch_m68k_gcc.h,sha256=VSa6NpZhvyyvF-Q58CTIWSpEDo4FKygOyTz00whctlw,928
+greenlet/platform/switch_mips_unix.h,sha256=9ptMGEBXafee15RxOm5NrxiC2bEnwM9AkxJ7ktVatU8,1444
+greenlet/platform/switch_ppc64_aix.h,sha256=ADpifLPlr6pTdT76bt6ozcqPjHrfPsJ93lQfc1VNaug,3878
+greenlet/platform/switch_ppc64_linux.h,sha256=jqPKpTg09FzmCn59Kt6OJi2-40aoazFVJcf1YETLlwA,3833
+greenlet/platform/switch_ppc_aix.h,sha256=nClVVlsRlFAI-I3fmivSJyJK7Xzx3_8l3Wf8QNJ9FMU,2959
+greenlet/platform/switch_ppc_linux.h,sha256=J4eKMA73WbPYSaq0yAedzHB6J6ZKE8tIIzkqYxlaA2c,2777
+greenlet/platform/switch_ppc_macosx.h,sha256=bnL2MqIUm9--NHizb5NYijvSrqutvuJx4auYCdqXllM,2642 
+greenlet/platform/switch_ppc_unix.h,sha256=5UW9c71NGJh6xksEbAOButBFH168QRyZ5O53yXdXGxg,2670 +greenlet/platform/switch_riscv_unix.h,sha256=c3v3GRDMooslDKQLM75IqokWivtelbAj3-XZK31vWlE,758 +greenlet/platform/switch_s390_unix.h,sha256=9oJkYnyUovPvXOAsVLXoj-Unl_Rr_DidkXYMaRXLS0w,2781 +greenlet/platform/switch_sparc_sun_gcc.h,sha256=0vHXNNCdz-1ioQsw-OtK0ridnBVIzErYWiK7bBu6OgM,2815 +greenlet/platform/switch_x32_unix.h,sha256=ie7Nxo6Cf_x4UVOSA_a3bJYPlRKZ1BvLWsclyQle_SY,1527 +greenlet/platform/switch_x64_masm.asm,sha256=nu6n2sWyXuXfpPx40d9YmLfHXUc1sHgeTvX1kUzuvEM,1841 +greenlet/platform/switch_x64_masm.obj,sha256=GNtTNxYdo7idFUYsQv-mrXWgyT5EJ93-9q90lN6svtQ,1078 +greenlet/platform/switch_x64_msvc.h,sha256=LIeasyKo_vHzspdMzMHbosRhrBfKI4BkQOh4qcTHyJw,1805 +greenlet/platform/switch_x86_msvc.h,sha256=hi0dgp-k14IhMCxwtJtcI_ciPnMGd37uMnMaHaeQVWg,2481 +greenlet/platform/switch_x86_unix.h,sha256=WvY2sNMFIEfoFVNVakl-osygJui3pSnlVj5jBrdaU08,3068 +greenlet/slp_platformselect.h,sha256=-J5Px9Yk7Ths4hQTecC3iadxfte1CYaFoeqfg1lUl-A,3095 +greenlet/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +greenlet/tests/__pycache__/__init__.cpython-310.pyc,, +greenlet/tests/__pycache__/test_contextvars.cpython-310.pyc,, +greenlet/tests/__pycache__/test_cpp.cpython-310.pyc,, +greenlet/tests/__pycache__/test_extension_interface.cpython-310.pyc,, +greenlet/tests/__pycache__/test_gc.cpython-310.pyc,, +greenlet/tests/__pycache__/test_generator.cpython-310.pyc,, +greenlet/tests/__pycache__/test_generator_nested.cpython-310.pyc,, +greenlet/tests/__pycache__/test_greenlet.cpython-310.pyc,, +greenlet/tests/__pycache__/test_leaks.cpython-310.pyc,, +greenlet/tests/__pycache__/test_stack_saved.cpython-310.pyc,, +greenlet/tests/__pycache__/test_throw.cpython-310.pyc,, +greenlet/tests/__pycache__/test_tracing.cpython-310.pyc,, +greenlet/tests/__pycache__/test_version.cpython-310.pyc,, +greenlet/tests/__pycache__/test_weakref.cpython-310.pyc,, +greenlet/tests/_test_extension.c,sha256=Tceb6kMFPSvAPW2LJ_zUlj--Wz_DtLzIPmgZcqkqAEU,5402 +greenlet/tests/_test_extension.cpython-310-x86_64-linux-gnu.so,sha256=l84szlydqoldTpkqTCCPWWJcjasC4Cn6TvB4u34yKTA,36152 +greenlet/tests/_test_extension_cpp.cpp,sha256=zKfz0FxBXicq-53rItZ_NP8M406OBtyQFdH5bv_pRmk,3212 +greenlet/tests/_test_extension_cpp.cpython-310-x86_64-linux-gnu.so,sha256=s6Baz0HK7NQHCLXLmLLp7Z-h8__KPY9QCgXTNJkvR40,47704 +greenlet/tests/test_contextvars.py,sha256=d69XSuRrdU80xAPmzdObLjrjXnbTQChG0MgsvBF_nGM,9205 +greenlet/tests/test_cpp.py,sha256=SXMuqsHTYTxFPBrasdbx5Sgplc89wvYEuPZvwafD-3k,488 +greenlet/tests/test_extension_interface.py,sha256=1FhUkxL-NrxmQV_sxUdlt8tvIWpDcGi27JcdQ6VyvFc,2521 +greenlet/tests/test_gc.py,sha256=oATPCmEAagdf1dZBYfZ0aiDklovLo_pQt5HZNTygCzk,2892 +greenlet/tests/test_generator.py,sha256=_MLDA1kBtZQR-9a74AOZZQECQCIFljMa7vbucE0cOxw,1280 +greenlet/tests/test_generator_nested.py,sha256=pGYRpNn_WjdhY_5ZHHBuBw10wskG_7mjJjR8IqleY3M,3579 +greenlet/tests/test_greenlet.py,sha256=SVDi0e1RrJtJhiOFggmoWTZL1sFdxRpdALFRCie-n60,23427 +greenlet/tests/test_leaks.py,sha256=STvFoZsFsZ_E24kYFaIASGBx97TRgTIur6uJXnoevWc,6677 +greenlet/tests/test_stack_saved.py,sha256=SyIHZycTBfm1TxFsq1VLCAgVm02t5GSke8tT28qwi7c,450 +greenlet/tests/test_throw.py,sha256=OOWfgcEaymvGVJQ3d4xDGzC5IVH0rZAiazWuyZV9270,2755 +greenlet/tests/test_tracing.py,sha256=hZ6Cl5NMq9IaeH7NGqWYl8aQ0_5nFUSYuo6TeSXvrKw,7455 +greenlet/tests/test_version.py,sha256=lHDe3qcLvfsOHcFKFW8yrcl5wBvy6UIxaNkZZzNlpHE,1229 +greenlet/tests/test_weakref.py,sha256=gqAQunjVzbwF6qEUZijhv6UqhH4apWNIRHeoWLUo9tM,884 diff --git 
a/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/WHEEL b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/WHEEL new file mode 100644 index 0000000..df5f4fa --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/top_level.txt new file mode 100644 index 0000000..46725be --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet-1.1.2.dist-info/top_level.txt @@ -0,0 +1 @@ +greenlet diff --git a/venv/lib/python3.10/site-packages/greenlet/__init__.py b/venv/lib/python3.10/site-packages/greenlet/__init__.py new file mode 100644 index 0000000..896f007 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/__init__.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +""" +The root of the greenlet package. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +__all__ = [ + '__version__', + '_C_API', + + 'GreenletExit', + 'error', + + 'getcurrent', + 'greenlet', + + 'gettrace', + 'settrace', +] + +# pylint:disable=no-name-in-module + +### +# Metadata +### +__version__ = '1.1.2' +from ._greenlet import _C_API # pylint:disable=no-name-in-module + +### +# Exceptions +### +from ._greenlet import GreenletExit +from ._greenlet import error + +### +# greenlets +### +from ._greenlet import getcurrent +from ._greenlet import greenlet + +### +# tracing +### +try: + from ._greenlet import gettrace + from ._greenlet import settrace +except ImportError: + # Tracing wasn't supported. + # XXX: The option to disable it was removed in 1.0, + # so this branch should be dead code. + pass + +### +# Constants +# These constants aren't documented and aren't recommended. 
+# In 1.0, USE_GC and USE_TRACING are always true, and USE_CONTEXT_VARS +# is the same as ``sys.version_info[:2] >= 3.7`` +### +from ._greenlet import GREENLET_USE_CONTEXT_VARS # pylint:disable=unused-import +from ._greenlet import GREENLET_USE_GC # pylint:disable=unused-import +from ._greenlet import GREENLET_USE_TRACING # pylint:disable=unused-import diff --git a/venv/lib/python3.10/site-packages/greenlet/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..04dc615 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/_greenlet.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/greenlet/_greenlet.cpython-310-x86_64-linux-gnu.so new file mode 100755 index 0000000..0738c97 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/_greenlet.cpython-310-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.10/site-packages/greenlet/greenlet.c b/venv/lib/python3.10/site-packages/greenlet/greenlet.c new file mode 100644 index 0000000..f47bbf8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/greenlet.c @@ -0,0 +1,2135 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/* Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#define GREENLET_MODULE + +#include "greenlet.h" + +#include "structmember.h" + +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-parameter" +# pragma clang diagnostic ignored "-Wmissing-field-initializers" +#endif + +/*********************************************************** + +A PyGreenlet is a range of C stack addresses that must be +saved and restored in such a way that the full range of the +stack contains valid data when we switch to it. + +Stack layout for a greenlet: + + | ^^^ | + | older data | + | | + stack_stop . |_______________| + . | | + . | greenlet data | + . | in stack | + . * |_______________| . . _____________ stack_copy + stack_saved + . | | | | + . | data | |greenlet data| + . | unrelated | | saved | + . | to | | in heap | + stack_start . | this | . . |_____________| stack_copy + | greenlet | + | | + | newer data | + | vvv | + + +Note that a greenlet's stack data is typically partly at its correct +place in the stack, and partly saved away in the heap, but always in +the above configuration: two blocks, the more recent one in the heap +and the older one still in the stack (either block may be empty). + +Greenlets are chained: each points to the previous greenlet, which is +the one that owns the data currently in the C stack above my +stack_stop. The currently running greenlet is the first element of +this chain. The main (initial) greenlet is the last one. Greenlets +whose stack is entirely in the heap can be skipped from the chain. + +The chain is not related to execution order, but only to the order +in which bits of C stack happen to belong to greenlets at a particular +point in time. + +The main greenlet doesn't have a stack_stop: it is responsible for the +complete rest of the C stack, and we don't know where it begins. We +use (char*) -1, the largest possible address. 
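This save/restore dance is observable from Python. A minimal sketch (assuming greenlet 1.1.x, and using the undocumented ``_stack_saved`` attribute this file exposes): while a greenlet is suspended, the slice of C stack it owned lives in the heap.

import greenlet

def child():
    main.switch()              # suspend: our slice of C stack moves to the heap

main = greenlet.getcurrent()
g = greenlet.greenlet(child)
g.switch()                     # run child until it switches back
print(g._stack_saved > 0)      # True while child is suspended
g.switch()                     # resume child and let it finish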
+ +States: + stack_stop == NULL && stack_start == NULL: did not start yet + stack_stop != NULL && stack_start == NULL: already finished + stack_stop != NULL && stack_start != NULL: active + +The running greenlet's stack_start is undefined but not NULL. + + ***********************************************************/ + +/*** global state ***/ + +/* In the presence of multithreading, this is a bit tricky: + + - ts_current always stores a reference to a greenlet, but it is + not really the current greenlet after a thread switch occurred. + + - each *running* greenlet uses its run_info field to know which + thread it is attached to. A greenlet can only run in the thread + where it was created. This run_info is a ref to tstate->dict. + + - the thread state dict is used to save and restore ts_current, + using the dictionary key 'ts_curkey'. +*/ + +extern PyTypeObject PyGreenlet_Type; + +#if PY_VERSION_HEX >= 0x030700A3 +# define GREENLET_PY37 1 +#else +# define GREENLET_PY37 0 +#endif + +#if PY_VERSION_HEX >= 0x30A00B1 +/* +Python 3.10 beta 1 changed tstate->use_tracing to a nested cframe member. +See https://github.com/python/cpython/pull/25276 +We have to save and restore this as well. +*/ +#define TSTATE_USE_TRACING(tstate) (tstate->cframe->use_tracing) +#define GREENLET_USE_CFRAME 1 +#else +#define TSTATE_USE_TRACING(tstate) (tstate->use_tracing) +#define GREENLET_USE_CFRAME 0 +#endif + +#ifndef Py_SET_REFCNT +/* Py_REFCNT and Py_SIZE macros are converted to functions +https://bugs.python.org/issue39573 */ +# define Py_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) +#endif + +#ifndef _Py_DEC_REFTOTAL +/* _Py_DEC_REFTOTAL macro has been removed from Python 3.9 by: + https://github.com/python/cpython/commit/49932fec62c616ec88da52642339d83ae719e924 +*/ +# ifdef Py_REF_DEBUG +# define _Py_DEC_REFTOTAL _Py_RefTotal-- +# else +# define _Py_DEC_REFTOTAL +# endif +#endif + +/* Weak reference to the switching-to greenlet during the slp switch */ +static PyGreenlet* volatile ts_target = NULL; +/* Strong reference to the switching from greenlet after the switch */ +static PyGreenlet* volatile ts_origin = NULL; +/* Strong reference to the current greenlet in this thread state */ +static PyGreenlet* volatile ts_current = NULL; +/* NULL if error, otherwise args tuple to pass around during slp switch */ +static PyObject* volatile ts_passaround_args = NULL; +static PyObject* volatile ts_passaround_kwargs = NULL; + +/* Used internally in ``g_switchstack()`` */ +#if GREENLET_USE_CFRAME +static int volatile ts__g_switchstack_use_tracing = 0; +#endif + +/***********************************************************/ +/* Thread-aware routines, switching global variables when needed */ + +#define STATE_OK \ + (ts_current->run_info == PyThreadState_GET()->dict || \ + !green_updatecurrent()) + +static PyObject* ts_curkey; +static PyObject* ts_delkey; +static PyObject* ts_tracekey; +static PyObject* ts_event_switch; +static PyObject* ts_event_throw; +static PyObject* PyExc_GreenletError; +static PyObject* PyExc_GreenletExit; +static PyObject* ts_empty_tuple; +static PyObject* ts_empty_dict; + +#define GREENLET_GC_FLAGS Py_TPFLAGS_HAVE_GC +#define GREENLET_tp_alloc PyType_GenericAlloc +#define GREENLET_tp_free PyObject_GC_Del +#define GREENLET_tp_traverse green_traverse +#define GREENLET_tp_clear green_clear +#define GREENLET_tp_is_gc green_is_gc + +static void +green_clear_exc(PyGreenlet* g) +{ +#if GREENLET_PY37 + g->exc_info = NULL; + g->exc_state.exc_type = NULL; + g->exc_state.exc_value = NULL; +
g->exc_state.exc_traceback = NULL; + g->exc_state.previous_item = NULL; +#else + g->exc_type = NULL; + g->exc_value = NULL; + g->exc_traceback = NULL; +#endif +} + +static PyGreenlet* +green_create_main(void) +{ + PyGreenlet* gmain; + PyObject* dict = PyThreadState_GetDict(); + if (dict == NULL) { + if (!PyErr_Occurred()) { + PyErr_NoMemory(); + } + return NULL; + } + + /* create the main greenlet for this thread */ + gmain = (PyGreenlet*)PyType_GenericAlloc(&PyGreenlet_Type, 0); + if (gmain == NULL) { + return NULL; + } + gmain->stack_start = (char*)1; + gmain->stack_stop = (char*)-1; + /* GetDict() returns a borrowed reference. Make it strong. */ + gmain->run_info = dict; + Py_INCREF(dict); + return gmain; +} + +static int +green_updatecurrent(void) +{ + PyObject *exc, *val, *tb; + PyThreadState* tstate; + PyGreenlet* current; + PyGreenlet* previous; + PyObject* deleteme; + +green_updatecurrent_restart: + /* save current exception */ + PyErr_Fetch(&exc, &val, &tb); + + /* get ts_current from the active tstate */ + tstate = PyThreadState_GET(); + if (tstate->dict && + (current = (PyGreenlet*)PyDict_GetItem(tstate->dict, ts_curkey))) { + /* found -- remove it, to avoid keeping a ref */ + Py_INCREF(current); + PyDict_DelItem(tstate->dict, ts_curkey); + } + else { + /* first time we see this tstate */ + current = green_create_main(); + if (current == NULL) { + Py_XDECREF(exc); + Py_XDECREF(val); + Py_XDECREF(tb); + return -1; + } + } + assert(current->run_info == tstate->dict); + +green_updatecurrent_retry: + /* update ts_current as soon as possible, in case of nested switches */ + Py_INCREF(current); + previous = ts_current; + ts_current = current; + + /* save ts_current as the current greenlet of its own thread */ + if (PyDict_SetItem(previous->run_info, ts_curkey, (PyObject*)previous)) { + Py_DECREF(previous); + Py_DECREF(current); + Py_XDECREF(exc); + Py_XDECREF(val); + Py_XDECREF(tb); + return -1; + } + Py_DECREF(previous); + + /* green_dealloc() cannot delete greenlets from other threads, so + it stores them in the thread dict; delete them now. */ + deleteme = PyDict_GetItem(tstate->dict, ts_delkey); + if (deleteme != NULL) { + /* The only reference to these greenlets should be in this list, so + clearing the list should let them be deleted again, triggering + calls to green_dealloc() in the correct thread. This may run + arbitrary Python code? + */ + PyList_SetSlice(deleteme, 0, INT_MAX, NULL); + } + + if (ts_current != current) { + /* some Python code executed above and there was a thread switch, + * so ts_current points to some other thread again. We need to + * delete ts_curkey (it's likely there) and retry. */ + PyDict_DelItem(tstate->dict, ts_curkey); + goto green_updatecurrent_retry; + } + + /* release an extra reference */ + Py_DECREF(current); + /* restore current exception */ + PyErr_Restore(exc, val, tb); + + /* thread switch could happen during PyErr_Restore, in that + case there's nothing to do except restart from scratch. 
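A Python-level illustration of this per-thread bookkeeping (a sketch): each OS thread lazily gets its own main greenlet the first time greenlet state is consulted there, so ``getcurrent()`` never returns a greenlet belonging to another thread.

import threading
import greenlet

seen = []

def record():
    seen.append(greenlet.getcurrent())

t = threading.Thread(target=record)
t.start()
t.join()

print(seen[0] is not greenlet.getcurrent())   # True: distinct main greenlets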
*/ + if (ts_current->run_info != tstate->dict) { + goto green_updatecurrent_restart; + } + return 0; +} + +static PyObject* +green_statedict(PyGreenlet* g) +{ + while (!PyGreenlet_STARTED(g)) { + g = g->parent; + if (g == NULL) { + /* garbage collected greenlet in chain */ + return NULL; + } + } + return g->run_info; +} + +/***********************************************************/ + +/* Some functions must not be inlined: + * slp_restore_state, when inlined into slp_switch might cause + it to restore stack over its own local variables + * slp_save_state, when inlined would add its own local + variables to the saved stack, wasting space + * slp_switch, cannot be inlined for obvious reasons + * g_initialstub, when inlined would receive a pointer into its + own stack frame, leading to incomplete stack save/restore +*/ + +#if defined(__GNUC__) && \ + (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define GREENLET_NOINLINE_SUPPORTED +# define GREENLET_NOINLINE(name) __attribute__((noinline)) name +#elif defined(_MSC_VER) && (_MSC_VER >= 1300) +# define GREENLET_NOINLINE_SUPPORTED +# define GREENLET_NOINLINE(name) __declspec(noinline) name +#endif + +#ifdef GREENLET_NOINLINE_SUPPORTED +/* add forward declarations */ +static void GREENLET_NOINLINE(slp_restore_state)(void); +static int GREENLET_NOINLINE(slp_save_state)(char*); +# if !(defined(MS_WIN64) && defined(_M_X64)) +static int GREENLET_NOINLINE(slp_switch)(void); +# endif +static int GREENLET_NOINLINE(g_initialstub)(void*); +# define GREENLET_NOINLINE_INIT() \ + do { \ + } while (0) +#else +/* force compiler to call functions via pointers */ +static void (*slp_restore_state)(void); +static int (*slp_save_state)(char*); +static int (*slp_switch)(void); +static int (*g_initialstub)(void*); +# define GREENLET_NOINLINE(name) cannot_inline_##name +# define GREENLET_NOINLINE_INIT() \ + do { \ + slp_restore_state = GREENLET_NOINLINE(slp_restore_state); \ + slp_save_state = GREENLET_NOINLINE(slp_save_state); \ + slp_switch = GREENLET_NOINLINE(slp_switch); \ + g_initialstub = GREENLET_NOINLINE(g_initialstub); \ + } while (0) +#endif + +/* + * the following macros are spliced into the OS/compiler + * specific code, in order to simplify maintenance. + */ + +#define SLP_SAVE_STATE(stackref, stsizediff) \ + stackref += STACK_MAGIC; \ + if (slp_save_state((char*)stackref)) \ + return -1; \ + if (!PyGreenlet_ACTIVE(ts_target)) \ + return 1; \ + stsizediff = ts_target->stack_start - (char*)stackref + +#define SLP_RESTORE_STATE() slp_restore_state() + +#define SLP_EVAL +#define slp_switch GREENLET_NOINLINE(slp_switch) +#include "slp_platformselect.h" +#undef slp_switch + +#ifndef STACK_MAGIC +# error \ + "greenlet needs to be ported to this platform, or taught how to detect your compiler properly." +#endif /* !STACK_MAGIC */ + +#ifdef EXTERNAL_ASM +/* CCP addition: Make these functions, to be called from assembler. + * The token include file for the given platform should enable the + * EXTERNAL_ASM define so that this is included. + */ + +intptr_t +slp_save_state_asm(intptr_t* ref) +{ + intptr_t diff; + SLP_SAVE_STATE(ref, diff); + return diff; +} + +void +slp_restore_state_asm(void) +{ + SLP_RESTORE_STATE(); +} + +extern int +slp_switch(void); + +#endif + +/***********************************************************/ + +static int +g_save(PyGreenlet* g, char* stop) +{ + /* Save more of g's stack into the heap -- at least up to 'stop' + + g->stack_stop |________| + | | + | __ stop . . . . . + | | ==> . . 
+ |________| _______ + | | | | + | | | | + g->stack_start | | |_______| g->stack_copy + + */ + intptr_t sz1 = g->stack_saved; + intptr_t sz2 = stop - g->stack_start; + assert(g->stack_start != NULL); + if (sz2 > sz1) { + char* c = (char*)PyMem_Realloc(g->stack_copy, sz2); + if (!c) { + PyErr_NoMemory(); + return -1; + } + memcpy(c + sz1, g->stack_start + sz1, sz2 - sz1); + g->stack_copy = c; + g->stack_saved = sz2; + } + return 0; +} + +static void GREENLET_NOINLINE(slp_restore_state)(void) +{ + PyGreenlet* g = ts_target; + PyGreenlet* owner = ts_current; + +#ifdef SLP_BEFORE_RESTORE_STATE + SLP_BEFORE_RESTORE_STATE(); +#endif + + /* Restore the heap copy back into the C stack */ + if (g->stack_saved != 0) { + memcpy(g->stack_start, g->stack_copy, g->stack_saved); + PyMem_Free(g->stack_copy); + g->stack_copy = NULL; + g->stack_saved = 0; + } + if (owner->stack_start == NULL) { + owner = owner->stack_prev; /* greenlet is dying, skip it */ + } + while (owner && owner->stack_stop <= g->stack_stop) { + owner = owner->stack_prev; /* find greenlet with more stack */ + } + g->stack_prev = owner; +} + +static int GREENLET_NOINLINE(slp_save_state)(char* stackref) +{ + /* must free all the C stack up to target_stop */ + char* target_stop = ts_target->stack_stop; + PyGreenlet* owner = ts_current; + assert(owner->stack_saved == 0); + if (owner->stack_start == NULL) { + owner = owner->stack_prev; /* not saved if dying */ + } + else { + owner->stack_start = stackref; + } + +#ifdef SLP_BEFORE_SAVE_STATE + SLP_BEFORE_SAVE_STATE(); +#endif + + while (owner->stack_stop < target_stop) { + /* ts_current is entirely within the area to free */ + if (g_save(owner, owner->stack_stop)) { + return -1; /* XXX */ + } + owner = owner->stack_prev; + } + if (owner != ts_target) { + if (g_save(owner, target_stop)) { + return -1; /* XXX */ + } + } + return 0; +} + +/** + Perform a stack switch according to some global variables + that must be set before calling this function. Those variables + are: + + - ts_current: current greenlet (holds a reference) + - ts_target: greenlet to switch to (weak reference) + - ts_passaround_args: NULL if PyErr_Occurred(), + else a tuple of args sent to ts_target (holds a reference) + - ts_passaround_kwargs: switch kwargs (holds a reference) + + Because the stack switch happens in this function, this function can't use + its own stack (local) variables, set before the switch, and then accessed after the + switch. Global variables beginning with ``ts__g_switchstack`` are used + internally instead. + + On return results are passed via global variables as well: + + - ts_origin: originating greenlet (holds a reference) + - ts_current: current greenlet (holds a reference) + - ts_passaround_args: NULL if PyErr_Occurred(), + else a tuple of args sent to ts_current (holds a reference) + - ts_passaround_kwargs: switch kwargs (holds a reference) + + It is very important that stack switch is 'atomic', i.e. no + calls into other Python code allowed (except very few that + are safe), because global variables are very fragile.
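At the Python level this pass-around contract looks as follows (a sketch): whatever is passed to ``switch()`` becomes the return value of the ``switch()`` call that suspended the other side.

import greenlet

def worker(first):
    second = main.switch(first + 1)   # sends 2 out, waits to be resumed
    print(second)                     # prints 3

main = greenlet.getcurrent()
g = greenlet.greenlet(worker)
print(g.switch(1))                    # prints 2, sent by worker's switch()
g.switch(3)                           # resume worker with 3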
+*/ +static int +g_switchstack(void) +{ + int err; + { /* save state */ + PyGreenlet* current = ts_current; + PyThreadState* tstate = PyThreadState_GET(); + current->recursion_depth = tstate->recursion_depth; + current->top_frame = tstate->frame; +#if GREENLET_PY37 + current->context = tstate->context; +#endif +#if GREENLET_PY37 + current->exc_info = tstate->exc_info; + current->exc_state = tstate->exc_state; +#else + current->exc_type = tstate->exc_type; + current->exc_value = tstate->exc_value; + current->exc_traceback = tstate->exc_traceback; +#endif +#if GREENLET_USE_CFRAME + /* + IMPORTANT: ``cframe`` is a pointer into the STACK. + Thus, because the call to ``slp_switch()`` + changes the contents of the stack, you cannot read from + ``ts_current->cframe`` after that call and necessarily + get the same values you get from reading it here. Anything + you need to restore from now to then must be saved + in a global variable (because we can't use stack variables + here either). + */ + current->cframe = tstate->cframe; + ts__g_switchstack_use_tracing = tstate->cframe->use_tracing; +#endif + } + + err = slp_switch(); + + if (err < 0) { /* error */ + PyGreenlet* current = ts_current; + current->top_frame = NULL; +#if GREENLET_PY37 + green_clear_exc(current); +#else + current->exc_type = NULL; + current->exc_value = NULL; + current->exc_traceback = NULL; +#endif + + assert(ts_origin == NULL); + ts_target = NULL; + } + else { + PyGreenlet* target = ts_target; + PyGreenlet* origin = ts_current; + PyThreadState* tstate = PyThreadState_GET(); + tstate->recursion_depth = target->recursion_depth; + tstate->frame = target->top_frame; + target->top_frame = NULL; + +#if GREENLET_PY37 + tstate->context = target->context; + target->context = NULL; + /* Incrementing this value invalidates the contextvars cache, + which would otherwise remain valid across switches */ + tstate->context_ver++; +#endif + +#if GREENLET_PY37 + tstate->exc_state = target->exc_state; + tstate->exc_info = + target->exc_info ? target->exc_info : &tstate->exc_state; +#else + tstate->exc_type = target->exc_type; + tstate->exc_value = target->exc_value; + tstate->exc_traceback = target->exc_traceback; +#endif + green_clear_exc(target); + +#if GREENLET_USE_CFRAME + tstate->cframe = target->cframe; + /* + If we were tracing, we need to keep tracing. + There should never be the possibility of hitting the + root_cframe here. See note above about why we can't + just copy this from ``origin->cframe->use_tracing``. 
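The tracing machinery that depends on this bookkeeping is reachable from Python via ``settrace`` (a sketch; the callback receives ``(event, (origin, target))``, matching the O(OO) call in ``g_calltrace`` below):

import greenlet

def tracer(event, args):
    origin, target = args
    print(event)                       # "switch" or "throw"

old = greenlet.settrace(tracer)        # returns the previous tracer (or None)
try:
    greenlet.greenlet(lambda: None).switch()   # traces "switch" going in and out
finally:
    greenlet.settrace(old)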
+ */ + tstate->cframe->use_tracing = ts__g_switchstack_use_tracing; +#endif + + assert(ts_origin == NULL); + Py_INCREF(target); + ts_current = target; + ts_origin = origin; + ts_target = NULL; + } + return err; +} + +static int +g_calltrace(PyObject* tracefunc, PyObject* event, PyGreenlet* origin, + PyGreenlet* target) +{ + PyObject* retval; + PyObject *exc_type, *exc_val, *exc_tb; + PyThreadState* tstate; + PyErr_Fetch(&exc_type, &exc_val, &exc_tb); + tstate = PyThreadState_GET(); + tstate->tracing++; + TSTATE_USE_TRACING(tstate) = 0; + retval = PyObject_CallFunction(tracefunc, "O(OO)", event, origin, target); + tstate->tracing--; + TSTATE_USE_TRACING(tstate) = + (tstate->tracing <= 0 && + ((tstate->c_tracefunc != NULL) || (tstate->c_profilefunc != NULL))); + if (retval == NULL) { + /* In case of exceptions trace function is removed */ + if (PyDict_GetItem(tstate->dict, ts_tracekey)) { + PyDict_DelItem(tstate->dict, ts_tracekey); + } + Py_XDECREF(exc_type); + Py_XDECREF(exc_val); + Py_XDECREF(exc_tb); + return -1; + } + else { + Py_DECREF(retval); + } + PyErr_Restore(exc_type, exc_val, exc_tb); + return 0; +} + +static PyObject* +g_switch(PyGreenlet* target, PyObject* args, PyObject* kwargs) +{ + /* _consumes_ a reference to the args tuple and kwargs dict, + and return a new tuple reference */ + int err = 0; + PyObject* run_info; + + /* check ts_current */ + if (!STATE_OK) { + Py_XDECREF(args); + Py_XDECREF(kwargs); + return NULL; + } + run_info = green_statedict(target); + if (run_info == NULL || run_info != ts_current->run_info) { + Py_XDECREF(args); + Py_XDECREF(kwargs); + PyErr_SetString(PyExc_GreenletError, + run_info ? + "cannot switch to a different thread" : + "cannot switch to a garbage collected greenlet"); + return NULL; + } + + ts_passaround_args = args; + ts_passaround_kwargs = kwargs; + + /* find the real target by ignoring dead greenlets, + and if necessary starting a greenlet. */ + while (target) { + if (PyGreenlet_ACTIVE(target)) { + ts_target = target; + err = g_switchstack(); + break; + } + if (!PyGreenlet_STARTED(target)) { + void* dummymarker; + ts_target = target; + err = g_initialstub(&dummymarker); + if (err == 1) { + continue; /* retry the switch */ + } + break; + } + target = target->parent; + } + + /* For a very short time, immediately after the 'atomic' + g_switchstack() call, global variables are in a known state. + We need to save everything we need, before it is destroyed + by calls into arbitrary Python code. */ + args = ts_passaround_args; + ts_passaround_args = NULL; + kwargs = ts_passaround_kwargs; + ts_passaround_kwargs = NULL; + if (err < 0) { + /* Turn switch errors into switch throws */ + assert(ts_origin == NULL); + Py_CLEAR(kwargs); + Py_CLEAR(args); + } + else { + PyGreenlet* origin; + PyGreenlet* current; + PyObject* tracefunc; + origin = ts_origin; + ts_origin = NULL; + current = ts_current; + if ((tracefunc = PyDict_GetItem(current->run_info, ts_tracekey)) != NULL) { + Py_INCREF(tracefunc); + if (g_calltrace(tracefunc, + args ? ts_event_switch : ts_event_throw, + origin, + current) < 0) { + /* Turn trace errors into switch throws */ + Py_CLEAR(kwargs); + Py_CLEAR(args); + } + Py_DECREF(tracefunc); + } + + Py_DECREF(origin); + } + + /* We need to figure out what values to pass to the target greenlet + based on the arguments that have been passed to greenlet.switch(). If + switch() was just passed an arg tuple, then we'll just return that. + If only keyword arguments were passed, then we'll pass the keyword + argument dict. 
Otherwise, we'll create a tuple of (args, kwargs) and + return both. */ + if (kwargs == NULL) { + return args; + } + else if (PyDict_Size(kwargs) == 0) { + Py_DECREF(kwargs); + return args; + } + else if (PySequence_Length(args) == 0) { + Py_DECREF(args); + return kwargs; + } + else { + PyObject* tuple = PyTuple_New(2); + if (tuple == NULL) { + Py_DECREF(args); + Py_DECREF(kwargs); + return NULL; + } + PyTuple_SET_ITEM(tuple, 0, args); + PyTuple_SET_ITEM(tuple, 1, kwargs); + return tuple; + } +} + +static PyObject* +g_handle_exit(PyObject* result) +{ + if (result == NULL && PyErr_ExceptionMatches(PyExc_GreenletExit)) { + /* catch and ignore GreenletExit */ + PyObject *exc, *val, *tb; + PyErr_Fetch(&exc, &val, &tb); + if (val == NULL) { + Py_INCREF(Py_None); + val = Py_None; + } + result = val; + Py_DECREF(exc); + Py_XDECREF(tb); + } + if (result != NULL) { + /* package the result into a 1-tuple */ + PyObject* r = result; + result = PyTuple_New(1); + if (result) { + PyTuple_SET_ITEM(result, 0, r); + } + else { + Py_DECREF(r); + } + } + return result; +} + +static int GREENLET_NOINLINE(g_initialstub)(void* mark) +{ + int err; + PyObject *o, *run; + PyObject *exc, *val, *tb; + PyObject* run_info; + PyGreenlet* self = ts_target; + PyObject* args = ts_passaround_args; + PyObject* kwargs = ts_passaround_kwargs; +#if GREENLET_USE_CFRAME + /* + See green_new(). This is a stack-allocated variable used + while *self* is in PyObject_Call(). + We want to defer copying the state info until we're sure + we need it and are in a stable place to do so. + */ + CFrame trace_info; +#endif + /* save exception in case getattr clears it */ + PyErr_Fetch(&exc, &val, &tb); + /* self.run is the object to call in the new greenlet */ + run = PyObject_GetAttrString((PyObject*)self, "run"); + if (run == NULL) { + Py_XDECREF(exc); + Py_XDECREF(val); + Py_XDECREF(tb); + return -1; + } + /* restore saved exception */ + PyErr_Restore(exc, val, tb); + + /* recheck the state in case getattr caused thread switches */ + if (!STATE_OK) { + Py_DECREF(run); + return -1; + } + + /* recheck run_info in case greenlet reparented anywhere above */ + run_info = green_statedict(self); + if (run_info == NULL || run_info != ts_current->run_info) { + Py_DECREF(run); + PyErr_SetString(PyExc_GreenletError, + run_info ? + "cannot switch to a different thread" : + "cannot switch to a garbage collected greenlet"); + return -1; + } + + /* by the time we got here another start could happen elsewhere, + * that means it should now be a regular switch + */ + if (PyGreenlet_STARTED(self)) { + Py_DECREF(run); + ts_passaround_args = args; + ts_passaround_kwargs = kwargs; + return 1; + } + +#if GREENLET_USE_CFRAME + /* OK, we need it, we're about to switch greenlets, save the state. */ + trace_info = *PyThreadState_GET()->cframe; + /* Make the target greenlet refer to the stack value. */ + self->cframe = &trace_info; + /* + And restore the link to the previous frame so this one gets + unlinked appropriately.
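Returning to the result-shaping rules at the end of ``g_switch`` above: they are easy to observe from Python, because switching to a dead greenlet simply hands the arguments back using the same rules (a sketch):

import greenlet

g = greenlet.greenlet(lambda: None)
g.switch()                    # runs to completion; g is now dead
print(g.dead)                 # True
print(g.switch(1))            # 1                 (single positional arg)
print(g.switch(1, 2))         # (1, 2)            (args tuple)
print(g.switch(x=1))          # {'x': 1}          (kwargs dict only)
print(g.switch(1, x=2))       # ((1,), {'x': 2})  (both)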
+ */ + self->cframe->previous = &PyThreadState_GET()->root_cframe; +#endif + /* start the greenlet */ + self->stack_start = NULL; + self->stack_stop = (char*)mark; + if (ts_current->stack_start == NULL) { + /* ts_current is dying */ + self->stack_prev = ts_current->stack_prev; + } + else { + self->stack_prev = ts_current; + } + self->top_frame = NULL; + green_clear_exc(self); + self->recursion_depth = PyThreadState_GET()->recursion_depth; + + /* restore arguments in case they are clobbered */ + ts_target = self; + ts_passaround_args = args; + ts_passaround_kwargs = kwargs; + + /* perform the initial switch */ + err = g_switchstack(); + + /* returns twice! + The 1st time with ``err == 1``: we are in the new greenlet + The 2nd time with ``err <= 0``: back in the caller's greenlet + */ + if (err == 1) { + /* in the new greenlet */ + PyGreenlet* origin; + PyObject* tracefunc; + PyObject* result; + PyGreenlet* parent; + self->stack_start = (char*)1; /* running */ + + /* grab origin while we still can */ + origin = ts_origin; + ts_origin = NULL; + + /* now use run_info to store the statedict */ + o = self->run_info; + self->run_info = green_statedict(self->parent); + Py_INCREF(self->run_info); + Py_XDECREF(o); + + if ((tracefunc = PyDict_GetItem(self->run_info, ts_tracekey)) != NULL) { + Py_INCREF(tracefunc); + if (g_calltrace(tracefunc, + args ? ts_event_switch : ts_event_throw, + origin, + self) < 0) { + /* Turn trace errors into switch throws */ + Py_CLEAR(kwargs); + Py_CLEAR(args); + } + Py_DECREF(tracefunc); + } + + Py_DECREF(origin); + + if (args == NULL) { + /* pending exception */ + result = NULL; + } + else { + /* call g.run(*args, **kwargs) */ + result = PyObject_Call(run, args, kwargs); + Py_DECREF(args); + Py_XDECREF(kwargs); + } + Py_DECREF(run); + result = g_handle_exit(result); + + /* jump back to parent */ + self->stack_start = NULL; /* dead */ + for (parent = self->parent; parent != NULL; parent = parent->parent) { + result = g_switch(parent, result, NULL); + /* Return here means switch to parent failed, + * in which case we throw *current* exception + * to the next parent in chain. + */ + assert(result == NULL); + } + /* We ran out of parents, cannot continue */ + PyErr_WriteUnraisable((PyObject*)self); + Py_FatalError("greenlets cannot continue"); + } + /* back in the parent */ + if (err < 0) { + /* start failed badly, restore greenlet state */ + self->stack_start = NULL; + self->stack_stop = NULL; + self->stack_prev = NULL; + } + return err; +} + +/***********************************************************/ + +static PyObject* +green_new(PyTypeObject* type, PyObject* args, PyObject* kwds) +{ + PyObject* o = + PyBaseObject_Type.tp_new(type, ts_empty_tuple, ts_empty_dict); + if (o != NULL) { + if (!STATE_OK) { + Py_DECREF(o); + return NULL; + } + Py_INCREF(ts_current); + ((PyGreenlet*)o)->parent = ts_current; +#if GREENLET_USE_CFRAME + /* + The PyThreadState->cframe pointer usually points to memory on the + stack, allocated in a call into PyEval_EvalFrameDefault. + + Initially, before any evaluation begins, it points to the initial + PyThreadState object's ``root_cframe`` object, which is statically + allocated for the lifetime of the thread. + + A greenlet can last for longer than a call to + PyEval_EvalFrameDefault, so we can't set its ``cframe`` pointer to + be the current ``PyThreadState->cframe``; nor could we use one from + the greenlet parent for the same reason.
Yet a further no: we can't + allocate one scoped to the greenlet and then destroy it when the + greenlet is deallocated, because inside the interpreter the CFrame + objects form a linked list, and that too can result in accessing + memory beyond its dynamic lifetime (if the greenlet doesn't actually + finish before it dies, its entry could still be in the list). + + Using the ``root_cframe`` is problematic, though, because its + members are never modified by the interpreter and are set to 0, + meaning that its ``use_tracing`` flag is never updated. We don't + want to modify that value in the ``root_cframe`` ourself: it + *shouldn't* matter much because we should probably never get back to + the point where that's the only cframe on the stack; even if it did + matter, the major consequence of an incorrect value for + ``use_tracing`` is that if it's true the interpreter does some extra + work --- however, it's just good code hygiene. + + Our solution: before a greenlet runs, after its initial creation, + it uses the ``root_cframe`` just to have something to put there. + However, once the greenlet is actually switched to for the first + time, ``g_initialstub`` (which doesn't actually "return" while the + greenlet is running) stores a new CFrame on its local stack, and + copies the appropriate values from the currently running CFrame; + this is then made the CFrame for the newly-minted greenlet. + ``g_initialstub`` then proceeds to call ``glet.run()``, which + results in ``PyEval_...`` adding the CFrame to the list. Switches + continue as normal. Finally, when the greenlet finishes, the call to + ``glet.run()`` returns and the CFrame is taken out of the linked + list and the stack value is now unused and free to expire. + */ + ((PyGreenlet*)o)->cframe = &PyThreadState_GET()->root_cframe; +#endif + } + return o; +} + +static int +green_setrun(PyGreenlet* self, PyObject* nrun, void* c); +static int +green_setparent(PyGreenlet* self, PyObject* nparent, void* c); + +static int +green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + PyObject* run = NULL; + PyObject* nparent = NULL; + static char* kwlist[] = {"run", "parent", 0}; + if (!PyArg_ParseTupleAndKeywords( + args, kwargs, "|OO:green", kwlist, &run, &nparent)) { + return -1; + } + + if (run != NULL) { + if (green_setrun(self, run, NULL)) { + return -1; + } + } + if (nparent != NULL && nparent != Py_None) { + return green_setparent(self, nparent, NULL); + } + return 0; +} + +static int +kill_greenlet(PyGreenlet* self) +{ + /* Cannot raise an exception to kill the greenlet if + it is not running in the same thread! */ + if (self->run_info == PyThreadState_GET()->dict) { + /* The dying greenlet cannot be a parent of ts_current + because the 'parent' field chain would hold a + reference */ + PyObject* result; + PyGreenlet* oldparent; + PyGreenlet* tmp; + if (!STATE_OK) { + return -1; + } + oldparent = self->parent; + self->parent = ts_current; + Py_INCREF(self->parent); + /* Send the greenlet a GreenletExit exception. */ + PyErr_SetNone(PyExc_GreenletExit); + result = g_switch(self, NULL, NULL); + tmp = self->parent; + self->parent = oldparent; + Py_XDECREF(tmp); + if (result == NULL) { + return -1; + } + Py_DECREF(result); + return 0; + } + else { + /* Not the same thread! Temporarily save the greenlet + into its thread's ts_delkey list.
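In the same-thread case the effect is immediate and visible from Python (a sketch): dropping the last reference to a suspended greenlet raises GreenletExit inside it so it can clean up.

import greenlet

def child():
    try:
        main.switch()                 # suspend indefinitely
    except greenlet.GreenletExit:
        print("cleaned up")           # raised by the deallocation path

main = greenlet.getcurrent()
g = greenlet.greenlet(child)
g.switch()                            # child is now suspended
del g                                 # last reference gone; prints "cleaned up"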
*/ + PyObject* lst; + lst = PyDict_GetItem(self->run_info, ts_delkey); + if (lst == NULL) { + lst = PyList_New(0); + if (lst == NULL + || PyDict_SetItem(self->run_info, ts_delkey, lst) < 0) { + return -1; + } + /* PyDict_SetItem now holds a strong reference. PyList_New also + returned a fresh reference. We need to DECREF it now and let + the dictionary keep sole ownership. From now on, we're working + with a borrowed reference that will go away when the thread + dies. */ + Py_DECREF(lst); + } + if (PyList_Append(lst, (PyObject*)self) < 0) { + return -1; + } + if (!STATE_OK) { /* to force ts_delkey to be reconsidered */ + return -1; + } + return 0; + } +} + +static int +green_traverse(PyGreenlet* self, visitproc visit, void* arg) +{ + /* We must only visit referenced objects, i.e. only objects + Py_INCREF'ed by this greenlet (directly or indirectly): + - stack_prev is not visited: holds previous stack pointer, but it's not + referenced + - frames are not visited: alive greenlets are not garbage collected + anyway */ + Py_VISIT((PyObject*)self->parent); + Py_VISIT(self->run_info); +#if GREENLET_PY37 + Py_VISIT(self->context); +#endif +#if GREENLET_PY37 + Py_VISIT(self->exc_state.exc_type); + Py_VISIT(self->exc_state.exc_value); + Py_VISIT(self->exc_state.exc_traceback); +#else + Py_VISIT(self->exc_type); + Py_VISIT(self->exc_value); + Py_VISIT(self->exc_traceback); +#endif + Py_VISIT(self->dict); + return 0; +} + +static int +green_is_gc(PyGreenlet* self) +{ + /* Main greenlet can be garbage collected since it can only + become unreachable if the underlying thread exited. + Active greenlet cannot be garbage collected, however. */ + if (PyGreenlet_MAIN(self) || !PyGreenlet_ACTIVE(self)) { + return 1; + } + return 0; +} + +static int +green_clear(PyGreenlet* self) +{ + /* Greenlet is only cleared if it is about to be collected. + Since active greenlets are not garbage collectable, we can + be sure that, even if they are deallocated during clear, + nothing they reference is in unreachable or finalizers, + so even if it switches we are relatively safe. */ + Py_CLEAR(self->parent); + Py_CLEAR(self->run_info); +#if GREENLET_PY37 + Py_CLEAR(self->context); +#endif +#if GREENLET_PY37 + Py_CLEAR(self->exc_state.exc_type); + Py_CLEAR(self->exc_state.exc_value); + Py_CLEAR(self->exc_state.exc_traceback); +#else + Py_CLEAR(self->exc_type); + Py_CLEAR(self->exc_value); + Py_CLEAR(self->exc_traceback); +#endif + Py_CLEAR(self->dict); + return 0; +} + +static void +green_dealloc(PyGreenlet* self) +{ + PyObject *error_type, *error_value, *error_traceback; + Py_ssize_t refcnt; + + PyObject_GC_UnTrack(self); + + if (PyGreenlet_ACTIVE(self) && self->run_info != NULL && + !PyGreenlet_MAIN(self)) { + /* Hacks hacks hacks copied from instance_dealloc() */ + /* Temporarily resurrect the greenlet. */ + assert(Py_REFCNT(self) == 0); + Py_SET_REFCNT(self, 1); + /* Save the current exception, if any. */ + PyErr_Fetch(&error_type, &error_value, &error_traceback); + if (kill_greenlet(self) < 0) { + PyErr_WriteUnraisable((PyObject*)self); + /* XXX what else should we do? */ + } + /* Check for no resurrection must be done while we keep + * our internal reference, otherwise PyFile_WriteObject + * causes recursion if using Py_INCREF/Py_DECREF + */ + if (Py_REFCNT(self) == 1 && PyGreenlet_ACTIVE(self)) { + /* Not resurrected, but still not dead! + XXX what else should we do? we complain. */ + PyObject* f = PySys_GetObject("stderr"); + Py_INCREF(self); /* leak!
*/ + if (f != NULL) { + PyFile_WriteString("GreenletExit did not kill ", f); + PyFile_WriteObject((PyObject*)self, f, 0); + PyFile_WriteString("\n", f); + } + } + /* Restore the saved exception. */ + PyErr_Restore(error_type, error_value, error_traceback); + /* Undo the temporary resurrection; can't use DECREF here, + * it would cause a recursive call. + */ + assert(Py_REFCNT(self) > 0); + + refcnt = Py_REFCNT(self) - 1; + Py_SET_REFCNT(self, refcnt); + if (refcnt != 0) { + /* Resurrected! */ + _Py_NewReference((PyObject*)self); + Py_SET_REFCNT(self, refcnt); + /* Better to use tp_finalizer slot (PEP 442) + * and call ``PyObject_CallFinalizerFromDealloc``, + * but that's only supported in Python 3.4+; see + * Modules/_io/iobase.c for an example. + * + * The following approach is copied from iobase.c in CPython 2.7. + * (along with much of this function in general). Here's their + * comment: + * + * When called from a heap type's dealloc, the type will be + * decref'ed on return (see e.g. subtype_dealloc in typeobject.c). */ + if (PyType_HasFeature(Py_TYPE(self), Py_TPFLAGS_HEAPTYPE)) { + Py_INCREF(Py_TYPE(self)); + } + + PyObject_GC_Track((PyObject*)self); + + _Py_DEC_REFTOTAL; +#ifdef COUNT_ALLOCS + --Py_TYPE(self)->tp_frees; + --Py_TYPE(self)->tp_allocs; +#endif /* COUNT_ALLOCS */ + return; + } + } + if (self->weakreflist != NULL) { + PyObject_ClearWeakRefs((PyObject*)self); + } + Py_CLEAR(self->parent); + Py_CLEAR(self->run_info); +#if GREENLET_PY37 + Py_CLEAR(self->context); +#endif +#if GREENLET_PY37 + Py_CLEAR(self->exc_state.exc_type); + Py_CLEAR(self->exc_state.exc_value); + Py_CLEAR(self->exc_state.exc_traceback); +#else + Py_CLEAR(self->exc_type); + Py_CLEAR(self->exc_value); + Py_CLEAR(self->exc_traceback); +#endif + Py_CLEAR(self->dict); + Py_TYPE(self)->tp_free((PyObject*)self); +} + +static PyObject* +single_result(PyObject* results) +{ + if (results != NULL && PyTuple_Check(results) && + PyTuple_GET_SIZE(results) == 1) { + PyObject* result = PyTuple_GET_ITEM(results, 0); + Py_INCREF(result); + Py_DECREF(results); + return result; + } + else { + return results; + } +} + +static PyObject* +throw_greenlet(PyGreenlet* self, PyObject* typ, PyObject* val, PyObject* tb) +{ + /* Note: _consumes_ a reference to typ, val, tb */ + PyObject* result = NULL; + PyErr_Restore(typ, val, tb); + if (PyGreenlet_STARTED(self) && !PyGreenlet_ACTIVE(self)) { + /* dead greenlet: turn GreenletExit into a regular return */ + result = g_handle_exit(result); + } + return single_result(g_switch(self, result, NULL)); +} + +PyDoc_STRVAR( + green_switch_doc, + "switch(*args, **kwargs)\n" + "\n" + "Switch execution to this greenlet.\n" + "\n" + "If this greenlet has never been run, then this greenlet\n" + "will be switched to using the body of ``self.run(*args, **kwargs)``.\n" + "\n" + "If the greenlet is active (has been run, but was switch()'ed\n" + "out before leaving its run function), then this greenlet will\n" + "be resumed and the return value to its switch call will be\n" + "None if no arguments are given, the given argument if one\n" + "argument is given, or the args tuple and keyword args dict if\n" + "multiple arguments are given.\n" + "\n" + "If the greenlet is dead, or is the current greenlet then this\n" + "function will simply return the arguments using the same rules as\n" + "above.\n"); + +static PyObject* +green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + Py_INCREF(args); + Py_XINCREF(kwargs); + return single_result(g_switch(self, args, kwargs)); +} + +PyDoc_STRVAR( + 
green_throw_doc, + "Switches execution to this greenlet, but immediately raises the\n" + "given exception in this greenlet. If no argument is provided, the " + "exception\n" + "defaults to `greenlet.GreenletExit`. The normal exception\n" + "propagation rules apply, as described for `switch`. Note that calling " + "this\n" + "method is almost equivalent to the following::\n" + "\n" + " def raiser():\n" + " raise typ, val, tb\n" + " g_raiser = greenlet(raiser, parent=g)\n" + " g_raiser.switch()\n" + "\n" + "except that this trick does not work for the\n" + "`greenlet.GreenletExit` exception, which would not propagate\n" + "from ``g_raiser`` to ``g``.\n"); + +static PyObject* +green_throw(PyGreenlet* self, PyObject* args) +{ + PyObject* typ = PyExc_GreenletExit; + PyObject* val = NULL; + PyObject* tb = NULL; + + if (!PyArg_ParseTuple(args, "|OOO:throw", &typ, &val, &tb)) { + return NULL; + } + + /* First, check the traceback argument, replacing None, with NULL */ + if (tb == Py_None) { + tb = NULL; + } + else if (tb != NULL && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "throw() third argument must be a traceback object"); + return NULL; + } + + Py_INCREF(typ); + Py_XINCREF(val); + Py_XINCREF(tb); + + if (PyExceptionClass_Check(typ)) { + PyErr_NormalizeException(&typ, &val, &tb); + } + else if (PyExceptionInstance_Check(typ)) { + /* Raising an instance. The value should be a dummy. */ + if (val && val != Py_None) { + PyErr_SetString( + PyExc_TypeError, + "instance exception may not have a separate value"); + goto failed_throw; + } + else { + /* Normalize to raise <class>, <instance> */ + Py_XDECREF(val); + val = typ; + typ = PyExceptionInstance_Class(typ); + Py_INCREF(typ); + } + } + else { + /* Not something you can raise. throw() fails. */ + PyErr_Format(PyExc_TypeError, + "exceptions must be classes, or instances, not %s", + Py_TYPE(typ)->tp_name); + goto failed_throw; + } + + return throw_greenlet(self, typ, val, tb); + +failed_throw: + /* Didn't use our arguments, so restore their original refcounts */ + Py_DECREF(typ); + Py_XDECREF(val); + Py_XDECREF(tb); + return NULL; +} + +static int +green_bool(PyGreenlet* self) +{ + return PyGreenlet_ACTIVE(self); +} + +static PyObject* +green_getdict(PyGreenlet* self, void* c) +{ + if (self->dict == NULL) { + self->dict = PyDict_New(); + if (self->dict == NULL) { + return NULL; + } + } + Py_INCREF(self->dict); + return self->dict; +} + +static int +green_setdict(PyGreenlet* self, PyObject* val, void* c) +{ + PyObject* tmp; + + if (val == NULL) { + PyErr_SetString(PyExc_TypeError, "__dict__ may not be deleted"); + return -1; + } + if (!PyDict_Check(val)) { + PyErr_SetString(PyExc_TypeError, "__dict__ must be a dictionary"); + return -1; + } + tmp = self->dict; + Py_INCREF(val); + self->dict = val; + Py_XDECREF(tmp); + return 0; +} + +static int +_green_not_dead(PyGreenlet* self) +{ + return PyGreenlet_ACTIVE(self) || !PyGreenlet_STARTED(self); +} + + +static PyObject* +green_getdead(PyGreenlet* self, void* c) +{ + if (_green_not_dead(self)) { + Py_RETURN_FALSE; + } + else { + Py_RETURN_TRUE; + } +} + +static PyObject* +green_get_stack_saved(PyGreenlet* self, void* c) +{ + return PyLong_FromSsize_t(self->stack_saved); +} + +static PyObject* +green_getrun(PyGreenlet* self, void* c) +{ + if (PyGreenlet_STARTED(self) || self->run_info == NULL) { + PyErr_SetString(PyExc_AttributeError, "run"); + return NULL; + } + Py_INCREF(self->run_info); + return self->run_info; +} + +static int +green_setrun(PyGreenlet* self, PyObject* nrun, void* c) +{ + PyObject*
o; + if (PyGreenlet_STARTED(self)) { + PyErr_SetString(PyExc_AttributeError, + "run cannot be set " + "after the start of the greenlet"); + return -1; + } + o = self->run_info; + self->run_info = nrun; + Py_XINCREF(nrun); + Py_XDECREF(o); + return 0; +} + +static PyObject* +green_getparent(PyGreenlet* self, void* c) +{ + PyObject* result = self->parent ? (PyObject*)self->parent : Py_None; + Py_INCREF(result); + return result; +} + +static int +green_setparent(PyGreenlet* self, PyObject* nparent, void* c) +{ + PyGreenlet* p; + PyObject* run_info = NULL; + if (nparent == NULL) { + PyErr_SetString(PyExc_AttributeError, "can't delete attribute"); + return -1; + } + if (!PyGreenlet_Check(nparent)) { + PyErr_SetString(PyExc_TypeError, "parent must be a greenlet"); + return -1; + } + for (p = (PyGreenlet*)nparent; p; p = p->parent) { + if (p == self) { + PyErr_SetString(PyExc_ValueError, "cyclic parent chain"); + return -1; + } + run_info = PyGreenlet_ACTIVE(p) ? p->run_info : NULL; + } + if (run_info == NULL) { + PyErr_SetString(PyExc_ValueError, + "parent must not be garbage collected"); + return -1; + } + if (PyGreenlet_STARTED(self) && self->run_info != run_info) { + PyErr_SetString(PyExc_ValueError, + "parent cannot be on a different thread"); + return -1; + } + p = self->parent; + self->parent = (PyGreenlet*)nparent; + Py_INCREF(nparent); + Py_XDECREF(p); + return 0; +} + +#ifdef Py_CONTEXT_H +# define GREENLET_NO_CONTEXTVARS_REASON "This build of greenlet" +#else +# define GREENLET_NO_CONTEXTVARS_REASON "This Python interpreter" +#endif + +static PyObject* +green_getcontext(PyGreenlet* self, void* c) +{ +#if GREENLET_PY37 + PyThreadState* tstate = PyThreadState_GET(); + PyObject* result; + + if (!STATE_OK) { + return NULL; + } + if (PyGreenlet_ACTIVE(self) && self->top_frame == NULL) { + /* Currently running greenlet: context is stored in the thread state, + not the greenlet object. */ + if (self == ts_current) { + result = tstate->context; + } + else { + PyErr_SetString(PyExc_ValueError, + "cannot get context of a " + "greenlet that is running in a different thread"); + return NULL; + } + } + else { + /* Greenlet is not running: just return context. */ + result = self->context; + } + if (result == NULL) { + result = Py_None; + } + Py_INCREF(result); + return result; +#else + PyErr_SetString(PyExc_AttributeError, + GREENLET_NO_CONTEXTVARS_REASON + " does not support context variables"); + return NULL; +#endif +} + +static int +green_setcontext(PyGreenlet* self, PyObject* nctx, void* c) +{ +#if GREENLET_PY37 + PyThreadState* tstate; + PyObject* octx = NULL; + if (!STATE_OK) { + return -1; + } + if (nctx == NULL) { + PyErr_SetString(PyExc_AttributeError, "can't delete attribute"); + return -1; + } + if (nctx == Py_None) { + /* "Empty context" is stored as NULL, not None. */ + nctx = NULL; + } + else if (!PyContext_CheckExact(nctx)) { + PyErr_SetString(PyExc_TypeError, + "greenlet context must be a " + "contextvars.Context or None"); + return -1; + } + tstate = PyThreadState_GET(); + if (PyGreenlet_ACTIVE(self) && self->top_frame == NULL) { + /* Currently running greenlet: context is stored in the thread state, + not the greenlet object. */ + if (self == ts_current) { + octx = tstate->context; + tstate->context = nctx; + tstate->context_ver++; + Py_XINCREF(nctx); + } + else { + PyErr_SetString(PyExc_ValueError, + "cannot set context of a " + "greenlet that is running in a different thread"); + return -1; + } + } + else { + /* Greenlet is not running: just set context. 
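From Python (3.7+) the same state is exposed as ``gr_context`` (a sketch): a greenlet that is not running carries its ``contextvars.Context`` on the object itself, and the context can be installed before the first switch.

import contextvars
import greenlet

who = contextvars.ContextVar("who", default="nobody")

def child():
    print(who.get())                  # prints "greenlet"

ctx = contextvars.copy_context()
ctx.run(who.set, "greenlet")

g = greenlet.greenlet(child)
g.gr_context = ctx                    # allowed while g is not running
g.switch()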
*/ + octx = self->context; + self->context = nctx; + Py_XINCREF(nctx); + } + Py_XDECREF(octx); + return 0; +#else + PyErr_SetString(PyExc_AttributeError, + GREENLET_NO_CONTEXTVARS_REASON + " does not support context variables"); + return -1; +#endif +} + +#undef GREENLET_NO_CONTEXTVARS_REASON + +static PyObject* +green_getframe(PyGreenlet* self, void* c) +{ + PyObject* result = self->top_frame ? (PyObject*)self->top_frame : Py_None; + Py_INCREF(result); + return result; +} + +static PyObject* +green_getstate(PyGreenlet* self) +{ + PyErr_Format(PyExc_TypeError, + "cannot serialize '%s' object", + Py_TYPE(self)->tp_name); + return NULL; +} + +static PyObject* +green_repr(PyGreenlet* self) +{ + /* + Return a string like + <greenlet.greenlet object at 0x... (otid=0x...) current active started main> + + The handling of greenlets across threads is not super good. + We mostly use the internal definitions of these terms, but they + generally should make sense to users as well. + */ + PyObject* result; + int never_started = !PyGreenlet_STARTED(self) && !PyGreenlet_ACTIVE(self); + + if (!STATE_OK) { + return NULL; + } + +#if PY_MAJOR_VERSION >= 3 +# define GNative_FromFormat PyUnicode_FromFormat +#else +# define GNative_FromFormat PyString_FromFormat +#endif + + if (_green_not_dead(self)) { + /* XXX: The otid= is almost useless because you can't correlate it to + any thread identifier exposed to Python. We could use + PyThreadState_GET()->thread_id, but we'd need to save that in the + greenlet, or save the whole PyThreadState object itself. + + As it stands, it's only useful for identifying greenlets from the same thread. + */ + result = GNative_FromFormat( + "<%s object at %p (otid=%p)%s%s%s%s>", + Py_TYPE(self)->tp_name, + self, + self->run_info, + ts_current == self + ? " current" + : (PyGreenlet_STARTED(self) ? " suspended" : ""), + PyGreenlet_ACTIVE(self) ? " active" : "", + never_started ? " pending" : " started", + PyGreenlet_MAIN(self) ? " main" : "" + ); + } + else { + /* main greenlets never really appear dead. */ + result = GNative_FromFormat( + "<%s object at %p (otid=%p) dead>", + Py_TYPE(self)->tp_name, + self, + self->run_info + ); + } +#undef GNative_FromFormat + + return result; +} + +/***************************************************************************** + * C interface + * + * These are exported using the CObject API + */ + +static PyGreenlet* +PyGreenlet_GetCurrent(void) +{ + if (!STATE_OK) { + return NULL; + } + Py_INCREF(ts_current); + return ts_current; +} + +static int +PyGreenlet_SetParent(PyGreenlet* g, PyGreenlet* nparent) +{ + if (!PyGreenlet_Check(g)) { + PyErr_SetString(PyExc_TypeError, "parent must be a greenlet"); + return -1; + } + + return green_setparent((PyGreenlet*)g, (PyObject*)nparent, NULL); +} + +static PyGreenlet* +PyGreenlet_New(PyObject* run, PyGreenlet* parent) +{ + /* XXX: Why doesn't this call green_new()? There's some duplicate + code.
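For reference, ``green_repr`` above produces strings of the following shape (a sketch; addresses vary per run):

import greenlet

print(repr(greenlet.getcurrent()))
# <greenlet.greenlet object at 0x7f... (otid=0x7f...) current active started main>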
*/ + PyGreenlet* g = NULL; + g = (PyGreenlet*)PyType_GenericAlloc(&PyGreenlet_Type, 0); + if (g == NULL) { + return NULL; + } + + if (run != NULL) { + Py_INCREF(run); + g->run_info = run; + } + + if (parent != NULL) { + if (PyGreenlet_SetParent(g, parent)) { + Py_DECREF(g); + return NULL; + } + } + else { + if ((g->parent = PyGreenlet_GetCurrent()) == NULL) { + Py_DECREF(g); + return NULL; + } + } +#if GREENLET_USE_CFRAME + g->cframe = &PyThreadState_GET()->root_cframe; +#endif + return g; +} + +static PyObject* +PyGreenlet_Switch(PyGreenlet* g, PyObject* args, PyObject* kwargs) +{ + PyGreenlet* self = (PyGreenlet*)g; + + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return NULL; + } + + if (args == NULL) { + args = Py_BuildValue("()"); + } + else { + Py_INCREF(args); + } + + if (kwargs != NULL && PyDict_Check(kwargs)) { + Py_INCREF(kwargs); + } + else { + kwargs = NULL; + } + + return single_result(g_switch(self, args, kwargs)); +} + +static PyObject* +PyGreenlet_Throw(PyGreenlet* self, PyObject* typ, PyObject* val, PyObject* tb) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return NULL; + } + Py_INCREF(typ); + Py_XINCREF(val); + Py_XINCREF(tb); + return throw_greenlet(self, typ, val, tb); +} + +/** End C API ****************************************************************/ + +static PyMethodDef green_methods[] = { + {"switch", + (PyCFunction)green_switch, + METH_VARARGS | METH_KEYWORDS, + green_switch_doc}, + {"throw", (PyCFunction)green_throw, METH_VARARGS, green_throw_doc}, + {"__getstate__", (PyCFunction)green_getstate, METH_NOARGS, NULL}, + {NULL, NULL} /* sentinel */ +}; + +static PyGetSetDef green_getsets[] = { + {"__dict__", (getter)green_getdict, (setter)green_setdict, /*XXX*/ NULL}, + {"run", (getter)green_getrun, (setter)green_setrun, /*XXX*/ NULL}, + {"parent", (getter)green_getparent, (setter)green_setparent, /*XXX*/ NULL}, + {"gr_frame", (getter)green_getframe, NULL, /*XXX*/ NULL}, + {"gr_context", + (getter)green_getcontext, + (setter)green_setcontext, + /*XXX*/ NULL}, + {"dead", (getter)green_getdead, NULL, /*XXX*/ NULL}, + {"_stack_saved", (getter)green_get_stack_saved, NULL, /*XXX*/ NULL}, + {NULL}}; + +static PyNumberMethods green_as_number = { + NULL, /* nb_add */ + NULL, /* nb_subtract */ + NULL, /* nb_multiply */ +#if PY_MAJOR_VERSION < 3 + NULL, /* nb_divide */ +#endif + NULL, /* nb_remainder */ + NULL, /* nb_divmod */ + NULL, /* nb_power */ + NULL, /* nb_negative */ + NULL, /* nb_positive */ + NULL, /* nb_absolute */ + (inquiry)green_bool, /* nb_bool */ +}; + +PyTypeObject PyGreenlet_Type = { + PyVarObject_HEAD_INIT(NULL, 0) + "greenlet.greenlet", /* tp_name */ + sizeof(PyGreenlet), /* tp_basicsize */ + 0, /* tp_itemsize */ + /* methods */ + (destructor)green_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + (reprfunc)green_repr, /* tp_repr */ + &green_as_number, /* tp_as _number*/ + 0, /* tp_as _sequence*/ + 0, /* tp_as _mapping*/ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer*/ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + GREENLET_GC_FLAGS, /* tp_flags */ + "greenlet(run=None, parent=None) -> greenlet\n\n" + "Creates a new greenlet object (without running it).\n\n" + " - *run* -- The callable to invoke.\n" + " - *parent* -- The parent greenlet. 
The default is the current " + "greenlet.", /* tp_doc */ + (traverseproc)GREENLET_tp_traverse, /* tp_traverse */ + (inquiry)GREENLET_tp_clear, /* tp_clear */ + 0, /* tp_richcompare */ + offsetof(PyGreenlet, weakreflist), /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + green_methods, /* tp_methods */ + 0, /* tp_members */ + green_getsets, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + offsetof(PyGreenlet, dict), /* tp_dictoffset */ + (initproc)green_init, /* tp_init */ + GREENLET_tp_alloc, /* tp_alloc */ + green_new, /* tp_new */ + GREENLET_tp_free, /* tp_free */ + (inquiry)GREENLET_tp_is_gc, /* tp_is_gc */ +}; + +PyDoc_STRVAR(mod_getcurrent_doc, + "getcurrent() -> greenlet\n" + "\n" + "Returns the current greenlet (i.e. the one which called this " + "function).\n"); + +static PyObject* +mod_getcurrent(PyObject* self) +{ + if (!STATE_OK) { + return NULL; + } + Py_INCREF(ts_current); + return (PyObject*)ts_current; +} + +PyDoc_STRVAR(mod_settrace_doc, + "settrace(callback) -> object\n" + "\n" + "Sets a new tracing function and returns the previous one.\n"); +static PyObject* +mod_settrace(PyObject* self, PyObject* args) +{ + int err; + PyObject* previous; + PyObject* tracefunc; + PyGreenlet* current; + if (!PyArg_ParseTuple(args, "O", &tracefunc)) { + return NULL; + } + if (!STATE_OK) { + return NULL; + } + current = ts_current; + previous = PyDict_GetItem(current->run_info, ts_tracekey); + if (previous == NULL) { + previous = Py_None; + } + Py_INCREF(previous); + if (tracefunc == Py_None) { + err = previous != Py_None ? + PyDict_DelItem(current->run_info, ts_tracekey) : + 0; + } + else { + err = PyDict_SetItem(current->run_info, ts_tracekey, tracefunc); + } + if (err < 0) { + Py_CLEAR(previous); + } + return previous; +} + +PyDoc_STRVAR(mod_gettrace_doc, + "gettrace() -> object\n" + "\n" + "Returns the currently set tracing function, or None.\n"); + +static PyObject* +mod_gettrace(PyObject* self) +{ + PyObject* tracefunc; + if (!STATE_OK) { + return NULL; + } + tracefunc = PyDict_GetItem(ts_current->run_info, ts_tracekey); + if (tracefunc == NULL) { + tracefunc = Py_None; + } + Py_INCREF(tracefunc); + return tracefunc; +} + +static PyMethodDef GreenMethods[] = { + {"getcurrent", + (PyCFunction)mod_getcurrent, + METH_NOARGS, + mod_getcurrent_doc}, + {"settrace", (PyCFunction)mod_settrace, METH_VARARGS, mod_settrace_doc}, + {"gettrace", (PyCFunction)mod_gettrace, METH_NOARGS, mod_gettrace_doc}, + {NULL, NULL} /* Sentinel */ +}; + +static char* copy_on_greentype[] = { + "getcurrent", "error", "GreenletExit", "settrace", "gettrace", NULL}; + +#if PY_MAJOR_VERSION >= 3 +# define INITERROR return NULL + +static struct PyModuleDef greenlet_module_def = { + PyModuleDef_HEAD_INIT, + "greenlet._greenlet", + NULL, + -1, + GreenMethods, +}; + +PyMODINIT_FUNC +PyInit__greenlet(void) +#else +# define INITERROR return + +PyMODINIT_FUNC +init_greenlet(void) +#endif +{ + PyObject* m = NULL; + char** p = NULL; + PyObject* c_api_object; + static void* _PyGreenlet_API[PyGreenlet_API_pointers]; + + GREENLET_NOINLINE_INIT(); + +#if PY_MAJOR_VERSION >= 3 + m = PyModule_Create(&greenlet_module_def); +#else + m = Py_InitModule("greenlet._greenlet", GreenMethods); +#endif + if (m == NULL) { + INITERROR; + } + +#if PY_MAJOR_VERSION >= 3 +# define Greenlet_Intern PyUnicode_InternFromString +#else +# define Greenlet_Intern PyString_InternFromString +#endif + ts_curkey = Greenlet_Intern("__greenlet_ts_curkey"); + ts_delkey = 
Greenlet_Intern("__greenlet_ts_delkey"); + ts_tracekey = Greenlet_Intern("__greenlet_ts_tracekey"); + ts_event_switch = Greenlet_Intern("switch"); + ts_event_throw = Greenlet_Intern("throw"); +#undef Greenlet_Intern + + if (ts_curkey == NULL || ts_delkey == NULL) { + INITERROR; + } + if (PyType_Ready(&PyGreenlet_Type) < 0) { + INITERROR; + } + PyExc_GreenletError = PyErr_NewException("greenlet.error", NULL, NULL); + if (PyExc_GreenletError == NULL) { + INITERROR; + } + PyExc_GreenletExit = + PyErr_NewException("greenlet.GreenletExit", PyExc_BaseException, NULL); + if (PyExc_GreenletExit == NULL) { + INITERROR; + } + + ts_empty_tuple = PyTuple_New(0); + if (ts_empty_tuple == NULL) { + INITERROR; + } + + ts_empty_dict = PyDict_New(); + if (ts_empty_dict == NULL) { + INITERROR; + } + + ts_current = green_create_main(); + if (ts_current == NULL) { + INITERROR; + } + + Py_INCREF(&PyGreenlet_Type); + PyModule_AddObject(m, "greenlet", (PyObject*)&PyGreenlet_Type); + Py_INCREF(PyExc_GreenletError); + PyModule_AddObject(m, "error", PyExc_GreenletError); + Py_INCREF(PyExc_GreenletExit); + PyModule_AddObject(m, "GreenletExit", PyExc_GreenletExit); + + PyModule_AddObject(m, "GREENLET_USE_GC", PyBool_FromLong(1)); + PyModule_AddObject(m, "GREENLET_USE_TRACING", PyBool_FromLong(1)); + PyModule_AddObject( + m, "GREENLET_USE_CONTEXT_VARS", PyBool_FromLong(GREENLET_PY37)); + + /* also publish module-level data as attributes of the greentype. */ + /* XXX: Why? */ + for (p = copy_on_greentype; *p; p++) { + PyObject* o = PyObject_GetAttrString(m, *p); + if (!o) { + continue; + } + PyDict_SetItemString(PyGreenlet_Type.tp_dict, *p, o); + Py_DECREF(o); + } + + /* + * Expose C API + */ + + /* types */ + _PyGreenlet_API[PyGreenlet_Type_NUM] = (void*)&PyGreenlet_Type; + + /* exceptions */ + _PyGreenlet_API[PyExc_GreenletError_NUM] = (void*)PyExc_GreenletError; + _PyGreenlet_API[PyExc_GreenletExit_NUM] = (void*)PyExc_GreenletExit; + + /* methods */ + _PyGreenlet_API[PyGreenlet_New_NUM] = (void*)PyGreenlet_New; + _PyGreenlet_API[PyGreenlet_GetCurrent_NUM] = (void*)PyGreenlet_GetCurrent; + _PyGreenlet_API[PyGreenlet_Throw_NUM] = (void*)PyGreenlet_Throw; + _PyGreenlet_API[PyGreenlet_Switch_NUM] = (void*)PyGreenlet_Switch; + _PyGreenlet_API[PyGreenlet_SetParent_NUM] = (void*)PyGreenlet_SetParent; + + /* XXX: Note that our module name is ``greenlet._greenlet``, but for + backwards compatibility with existing C code, we need the _C_API to + be directly in greenlet. + */ + c_api_object = + PyCapsule_New((void*)_PyGreenlet_API, "greenlet._C_API", NULL); + if (c_api_object != NULL) { + PyModule_AddObject(m, "_C_API", c_api_object); + } + +#if PY_MAJOR_VERSION >= 3 + return m; +#endif +} + +#ifdef __clang__ +# pragma clang diagnostic pop +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/greenlet.h b/venv/lib/python3.10/site-packages/greenlet/greenlet.h new file mode 100644 index 0000000..830bef8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/greenlet.h @@ -0,0 +1,146 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ + +/* Greenlet object interface */ + +#ifndef Py_GREENLETOBJECT_H +#define Py_GREENLETOBJECT_H + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* This is deprecated and undocumented. It does not change. 
*/ +#define GREENLET_VERSION "1.0.0" + +typedef struct _greenlet { + PyObject_HEAD + char* stack_start; + char* stack_stop; + char* stack_copy; + intptr_t stack_saved; + struct _greenlet* stack_prev; + struct _greenlet* parent; + PyObject* run_info; + struct _frame* top_frame; + int recursion_depth; + PyObject* weakreflist; +#if PY_VERSION_HEX >= 0x030700A3 + _PyErr_StackItem* exc_info; + _PyErr_StackItem exc_state; +#else + PyObject* exc_type; + PyObject* exc_value; + PyObject* exc_traceback; +#endif + PyObject* dict; +#if PY_VERSION_HEX >= 0x030700A3 + PyObject* context; +#endif +#if PY_VERSION_HEX >= 0x30A00B1 + CFrame* cframe; +#endif +} PyGreenlet; + +#define PyGreenlet_Check(op) PyObject_TypeCheck(op, &PyGreenlet_Type) +#define PyGreenlet_MAIN(op) (((PyGreenlet*)(op))->stack_stop == (char*)-1) +#define PyGreenlet_STARTED(op) (((PyGreenlet*)(op))->stack_stop != NULL) +#define PyGreenlet_ACTIVE(op) (((PyGreenlet*)(op))->stack_start != NULL) +#define PyGreenlet_GET_PARENT(op) (((PyGreenlet*)(op))->parent) + +/* C API functions */ + +/* Total number of symbols that are exported */ +#define PyGreenlet_API_pointers 8 + +#define PyGreenlet_Type_NUM 0 +#define PyExc_GreenletError_NUM 1 +#define PyExc_GreenletExit_NUM 2 + +#define PyGreenlet_New_NUM 3 +#define PyGreenlet_GetCurrent_NUM 4 +#define PyGreenlet_Throw_NUM 5 +#define PyGreenlet_Switch_NUM 6 +#define PyGreenlet_SetParent_NUM 7 + +#ifndef GREENLET_MODULE +/* This section is used by modules that use the greenlet C API */ +static void** _PyGreenlet_API = NULL; + +# define PyGreenlet_Type \ + (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) + +# define PyExc_GreenletError \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) + +# define PyExc_GreenletExit \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) + +/* + * PyGreenlet_New(PyObject *run, PyGreenlet *parent) + * + * greenlet.greenlet(run, parent=None) + */ +# define PyGreenlet_New \ + (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ + _PyGreenlet_API[PyGreenlet_New_NUM]) + +/* + * PyGreenlet_GetCurrent(void) + * + * greenlet.getcurrent() + */ +# define PyGreenlet_GetCurrent \ + (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) + +/* + * PyGreenlet_Throw( + * PyGreenlet *greenlet, + * PyObject *typ, + * PyObject *val, + * PyObject *tb) + * + * g.throw(...) + */ +# define PyGreenlet_Throw \ + (*(PyObject * (*)(PyGreenlet * self, \ + PyObject * typ, \ + PyObject * val, \ + PyObject * tb)) \ + _PyGreenlet_API[PyGreenlet_Throw_NUM]) + +/* + * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args, PyObject *kwargs) + * + * g.switch(*args, **kwargs) + */ +# define PyGreenlet_Switch \ + (*(PyObject * \ + (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ + _PyGreenlet_API[PyGreenlet_Switch_NUM]) + +/* + * PyGreenlet_SetParent(PyGreenlet *greenlet, PyGreenlet *new_parent) + * + * g.parent = new_parent + */ +# define PyGreenlet_SetParent \ + (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ + _PyGreenlet_API[PyGreenlet_SetParent_NUM]) + +/* Macro that imports greenlet and initializes C API */ +/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we + keep the older definition to be sure older code that might have a copy of + the header still works.
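*/

/* Editor's sketch, not part of the upstream header: what a consuming
 * extension looks like.  It calls the PyGreenlet_Import() macro defined
 * just below once at module-init time and may then use the API macros
 * above.  The function name spawn_and_wait and its argument are
 * hypothetical; error handling is kept minimal. */

static PyObject *
spawn_and_wait(PyObject *run /* a Python callable */)
{
    PyGreenlet *g;
    PyObject *result;

    if (_PyGreenlet_API == NULL) {
        PyGreenlet_Import();  /* fills _PyGreenlet_API from the capsule */
        if (_PyGreenlet_API == NULL) {
            return NULL;  /* PyCapsule_Import has set an exception */
        }
    }
    /* parent == NULL: the current greenlet becomes the parent */
    g = PyGreenlet_New(run, NULL);
    if (g == NULL) {
        return NULL;
    }
    /* start run() in the new greenlet; NULL args/kwargs mean "no
       arguments", exactly as in PyGreenlet_Switch() in greenlet.c */
    result = PyGreenlet_Switch(g, NULL, NULL);
    Py_DECREF(g);
    return result;
}

/* The import macro used above: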
*/ +# define PyGreenlet_Import() \ + { \ + _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ + } + +#endif /* GREENLET_MODULE */ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_GREENLETOBJECT_H */ diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/setup_switch_x64_masm.cmd b/venv/lib/python3.10/site-packages/greenlet/platform/setup_switch_x64_masm.cmd new file mode 100644 index 0000000..0928595 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/setup_switch_x64_masm.cmd @@ -0,0 +1,2 @@ +call "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" amd64 +ml64 /nologo /c /Fo switch_x64_masm.obj switch_x64_masm.asm diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_aarch64_gcc.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_aarch64_gcc.h new file mode 100644 index 0000000..0b9d556 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_aarch64_gcc.h @@ -0,0 +1,69 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-16 Add clang support using x register naming. Fredrik Fornwall + * 13-Apr-13 Add support for strange GCC caller-save decisions + * 08-Apr-13 File creation. Michael Matz + * + * NOTES + * + * Simply save all callee saved registers + * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REGS_TO_SAVE "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", \ + "x27", "x28", "x30" /* aka lr */, \ + "v8", "v9", "v10", "v11", \ + "v12", "v13", "v14", "v15" + +static int +slp_switch(void) +{ + int err; + void *fp; + register long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("str x29, %0" : "=m"(fp) : : ); + __asm__ ("mov %0, sp" : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add sp,sp,%0\n" + "add x29,x29,%0\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + /* SLP_SAVE_STATE macro contains some return statements + (of -1 and 1). It falls through only when + the return value of slp_save_state() is zero, which + is placed in x0. + In that case we (slp_switch) also want to return zero + (also in x0 of course). + Now, some GCC versions (seen with 4.8) think it's a + good idea to save/restore x0 around the call to + slp_restore_state(), instead of simply zeroing it + at the return below. But slp_restore_state + writes random values to the stack slot used for this + save/restore (from when it once was saved above in + SLP_SAVE_STATE, when it was still uninitialized), so + "restoring" that precious zero actually makes us + return random values. There are some ways to make + GCC not use that zero value in the normal return path + (e.g. making err volatile, but that costs a little + stack space), and the simplest is to call a function + that returns an unknown value (which happens to be zero), + so the saved/restored value is unused. 
*/ + __asm__ volatile ("mov %0, #0" : "=r" (err)); + } + __asm__ volatile ("ldr x29, %0" : : "m" (fp) :); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return err; +} + +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_alpha_unix.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_alpha_unix.h new file mode 100644 index 0000000..216619f --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_alpha_unix.h @@ -0,0 +1,30 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "$9", "$10", "$11", "$12", "$13", "$14", "$15", \ + "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9" + +static int +slp_switch(void) +{ + register int ret; + register long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mov $30, %0" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addq $30, %0, $30\n\t" + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mov $31, %0" : "=r" (ret) : ); + return ret; +} + +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_amd64_unix.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_amd64_unix.h new file mode 100644 index 0000000..16b99b7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_amd64_unix.h @@ -0,0 +1,84 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 3-May-13 Ralf Schmitt + * Add support for strange GCC caller-save decisions + * (ported from switch_aarch64_gcc.h) + * 18-Aug-11 Alexey Borzenkov + * Correctly save rbp, csr and cw + * 01-Apr-04 Hye-Shik Chang + * Ported from i386 to amd64. + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to ensure + * that f in slp_eval_frame(PyFrameObject *f) + * gets included into the saved stack area. + * STACK_REFPLUS will probably be 1 in most cases. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 31-Apr-02 Armin Rigo + * Added ebx, esi and edi register-saves. + * 01-Mar-02 Samuel M. Rushing + * Ported from i386.
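*/

/* Editor's note, not part of the upstream header: every slp_switch()
 * variant in these platform files brackets its body with an empty
 * inline-asm statement whose only effect is its clobber list, e.g.
 * __asm__ volatile ("" : : : REGS_TO_SAVE).  Because the compiler must
 * assume the listed callee-saved registers are destroyed there, it
 * spills any live values to the stack before that point and reloads
 * them afterwards, so the stack copy taken by SLP_SAVE_STATE captures
 * them and the copy written back by SLP_RESTORE_STATE restores them.
 * A freestanding illustration of the idiom (amd64 register names): */

static int
clobber_barrier_demo(void)
{
    int live = 42;
    /* no instructions are emitted, but r12-r15 are treated as
       clobbered, forcing live values out of them onto the stack */
    __asm__ volatile ("" : : : "r12", "r13", "r14", "r15");
    return live;
}

/* The real amd64 implementation follows.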
+ */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +/* #define STACK_MAGIC 3 */ +/* the above works fine with gcc 2.96, but 2.95.3 wants this */ +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "r12", "r13", "r14", "r15" + +static int +slp_switch(void) +{ + int err; + void* rbp; + void* rbx; + unsigned int csr; + unsigned short cw; + register long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("stmxcsr %0" : "=m" (csr)); + __asm__ volatile ("movq %%rbp, %0" : "=m" (rbp)); + __asm__ volatile ("movq %%rbx, %0" : "=m" (rbx)); + __asm__ ("movq %%rsp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addq %0, %%rsp\n" + "addq %0, %%rbp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + __asm__ volatile ("xorq %%rax, %%rax" : "=a" (err)); + } + __asm__ volatile ("movq %0, %%rbx" : : "m" (rbx)); + __asm__ volatile ("movq %0, %%rbp" : : "m" (rbp)); + __asm__ volatile ("ldmxcsr %0" : : "m" (csr)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_arm32_gcc.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_arm32_gcc.h new file mode 100644 index 0000000..035d6b9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_arm32_gcc.h @@ -0,0 +1,79 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 14-Aug-06 File creation. Ported from Arm Thumb. Sylvain Baro + * 3-Sep-06 Commented out saving of r1-r3 (r4 already commented out) as I + * read that these do not need to be saved. Also added notes and + * errors related to the frame pointer. Richard Tew. + * + * NOTES + * + * It is not possible to detect if fp is used or not, so the supplied + * switch function needs to support it, so that you can remove it if + * it does not apply to you. + * + * POSSIBLE ERRORS + * + * "fp cannot be used in asm here" + * + * - Try commenting out "fp" in REGS_TO_SAVE. 
+ * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REG_SP "sp" +#define REG_SPSP "sp,sp" +#ifdef __thumb__ +#define REG_FP "r7" +#define REG_FPFP "r7,r7" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r9", "r10", "r11", "lr" +#else +#define REG_FP "fp" +#define REG_FPFP "fp,fp" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r8", "r9", "r10", "lr" +#endif +#if defined(__SOFTFP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL +#elif defined(__VFP_FP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ + "d12", "d13", "d14", "d15" +#elif defined(__MAVERICK__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "mvf4", "mvf5", "mvf6", "mvf7", \ + "mvf8", "mvf9", "mvf10", "mvf11", \ + "mvf12", "mvf13", "mvf14", "mvf15" +#else +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "f4", "f5", "f6", "f7" +#endif + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + void *fp; + register int *stackref, stsizediff; + int result; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mov r0," REG_FP "\n\tstr r0,%0" : "=m" (fp) : : "r0"); + __asm__ ("mov %0," REG_SP : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add " REG_SPSP ",%0\n" + "add " REG_FPFP ",%0\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ldr r0,%1\n\tmov " REG_FP ",r0\n\tmov %0, #0" : "=r" (result) : "m" (fp) : "r0"); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return result; +} + +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_arm32_ios.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_arm32_ios.h new file mode 100644 index 0000000..e993707 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_arm32_ios.h @@ -0,0 +1,67 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 31-May-15 iOS support. Ported from arm32. Proton + * + * NOTES + * + * It is not possible to detect if fp is used or not, so the supplied + * switch function needs to support it, so that you can remove it if + * it does not apply to you. + * + * POSSIBLE ERRORS + * + * "fp cannot be used in asm here" + * + * - Try commenting out "fp" in REGS_TO_SAVE. 
+ * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 +#define REG_SP "sp" +#define REG_SPSP "sp,sp" +#define REG_FP "r7" +#define REG_FPFP "r7,r7" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r10", "r11", "lr" +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ + "d12", "d13", "d14", "d15" + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + void *fp; + register int *stackref, stsizediff, result; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("str " REG_FP ",%0" : "=m" (fp)); + __asm__ ("mov %0," REG_SP : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add " REG_SPSP ",%0\n" + "add " REG_FPFP ",%0\n" + : + : "r" (stsizediff) + : REGS_TO_SAVE /* Clobber registers, force compiler to + * recalculate address of void *fp from REG_SP or REG_FP */ + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ( + "ldr " REG_FP ", %1\n\t" + "mov %0, #0" + : "=r" (result) + : "m" (fp) + : REGS_TO_SAVE /* Force compiler to restore saved registers after this */ + ); + return result; +} + +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_csky_gcc.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_csky_gcc.h new file mode 100644 index 0000000..7486b94 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_csky_gcc.h @@ -0,0 +1,48 @@ +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REG_FP "r8" +#ifdef __CSKYABIV2__ +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r9", "r10", "r11", "r15",\ + "r16", "r17", "r18", "r19", "r20", "r21", "r22",\ + "r23", "r24", "r25" + +#if defined (__CSKY_HARD_FLOAT__) || (__CSKY_VDSP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "vr8", "vr9", "vr10", "vr11", "vr12",\ + "vr13", "vr14", "vr15" +#else +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL +#endif +#else +#define REGS_TO_SAVE "r9", "r10", "r11", "r12", "r13", "r15" +#endif + + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + register int *stackref, stsizediff; + int result; + + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mov %0, sp" : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addu sp,%0\n" + "addu "REG_FP",%0\n" + : + : "r" (stsizediff) + ); + + SLP_RESTORE_STATE(); + } + __asm__ volatile ("movi %0, 0" : "=r" (result)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + + return result; +} + +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_m68k_gcc.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_m68k_gcc.h new file mode 100644 index 0000000..da761c2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_m68k_gcc.h @@ -0,0 +1,38 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 2014-01-06 Andreas Schwab + * File created. 
+ */ + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "%d2", "%d3", "%d4", "%d5", "%d6", "%d7", \ + "%a2", "%a3", "%a4" + +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + void *fp, *a5; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("move.l %%fp, %0" : "=m"(fp)); + __asm__ volatile ("move.l %%a5, %0" : "=m"(a5)); + __asm__ ("move.l %%sp, %0" : "=r"(stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ("add.l %0, %%sp; add.l %0, %%fp" : : "r"(stsizediff)); + SLP_RESTORE_STATE(); + __asm__ volatile ("clr.l %0" : "=g" (err)); + } + __asm__ volatile ("move.l %0, %%a5" : : "m"(a5)); + __asm__ volatile ("move.l %0, %%fp" : : "m"(fp)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return err; +} + +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_mips_unix.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_mips_unix.h new file mode 100644 index 0000000..1916b26 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_mips_unix.h @@ -0,0 +1,64 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 20-Sep-14 Matt Madison + * Re-code the saving of the gp register for MIPS64. + * 05-Jan-08 Thiemo Seufer + * Ported from ppc. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "$16", "$17", "$18", "$19", "$20", "$21", "$22", \ + "$23", "$30" +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; +#ifdef __mips64 + uint64_t gpsave; +#endif + __asm__ __volatile__ ("" : : : REGS_TO_SAVE); +#ifdef __mips64 + __asm__ __volatile__ ("sd $28,%0" : "=m" (gpsave) : : ); +#endif + __asm__ ("move %0, $29" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ __volatile__ ( +#ifdef __mips64 + "daddu $29, %0\n" +#else + "addu $29, %0\n" +#endif + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } +#ifdef __mips64 + __asm__ __volatile__ ("ld $28,%0" : : "m" (gpsave) : ); +#endif + __asm__ __volatile__ ("" : : : REGS_TO_SAVE); + __asm__ __volatile__ ("move %0, $0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc64_aix.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc64_aix.h new file mode 100644 index 0000000..e07b8de --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc64_aix.h @@ -0,0 +1,103 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 16-Oct-20 Jesse Gorzinski + * Copied from Linux PPC64 implementation + * 04-Sep-18 Alexey Borzenkov + * Workaround a gcc bug using manual save/restore of r30 + * 21-Mar-18 Tulio Magno Quites Machado Filho + * Added r30 to the list of saved registers in order to fully comply with + * both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this + * register as a nonvolatile register used for local variables. + * 21-Mar-18 Laszlo Boszormenyi + * Save r2 (TOC pointer) manually. + * 10-Dec-13 Ulrich Weigand + * Support ELFv2 ABI. Save float/vector registers. + * 09-Mar-12 Michael Ellerman + * 64-bit implementation, copied from 32-bit. + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. 
!!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 6 + +#if defined(__ALTIVEC__) +#define ALTIVEC_REGS \ + "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ + "v28", "v29", "v30", "v31", +#else +#define ALTIVEC_REGS +#endif + +#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "r31", \ + "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ + "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ + "fr30", "fr31", \ + ALTIVEC_REGS \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + register int err; + register long *stackref, stsizediff; + void * toc; + void * r30; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("std 2, %0" : "=m" (toc)); + __asm__ volatile ("std 30, %0" : "=m" (r30)); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ld 30, %0" : : "m" (r30)); + __asm__ volatile ("ld 2, %0" : : "m" (toc)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc64_linux.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc64_linux.h new file mode 100644 index 0000000..88e6847 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc64_linux.h @@ -0,0 +1,105 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 04-Sep-18 Alexey Borzenkov + * Workaround a gcc bug using manual save/restore of r30 + * 21-Mar-18 Tulio Magno Quites Machado Filho + * Added r30 to the list of saved registers in order to fully comply with + * both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this + * register as a nonvolatile register used for local variables. + * 21-Mar-18 Laszlo Boszormenyi + * Save r2 (TOC pointer) manually. + * 10-Dec-13 Ulrich Weigand + * Support ELFv2 ABI. Save float/vector registers. + * 09-Mar-12 Michael Ellerman + * 64-bit implementation, copied from 32-bit. + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! 
+ * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#if _CALL_ELF == 2 +#define STACK_MAGIC 4 +#else +#define STACK_MAGIC 6 +#endif + +#if defined(__ALTIVEC__) +#define ALTIVEC_REGS \ + "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ + "v28", "v29", "v30", "v31", +#else +#define ALTIVEC_REGS +#endif + +#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "r31", \ + "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ + "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ + "fr30", "fr31", \ + ALTIVEC_REGS \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + register int err; + register long *stackref, stsizediff; + void * toc; + void * r30; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("std 2, %0" : "=m" (toc)); + __asm__ volatile ("std 30, %0" : "=m" (r30)); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ld 30, %0" : : "m" (r30)); + __asm__ volatile ("ld 2, %0" : : "m" (toc)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_aix.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_aix.h new file mode 100644 index 0000000..c7d476f --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_aix.h @@ -0,0 +1,87 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Mar-11 Floris Bruynooghe + * Do not add stsizediff to general purpose + * register (GPR) 30 as this is a non-volatile and + * unused by the PowerOpen Environment, therefore + * this was modifying a user register instead of the + * frame pointer (which does not seem to exist). + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? 
+ * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_linux.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_linux.h new file mode 100644 index 0000000..0a71255 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_linux.h @@ -0,0 +1,84 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. 
+ */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + "add 30, 30, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_macosx.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_macosx.h new file mode 100644 index 0000000..56e573f --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_macosx.h @@ -0,0 +1,82 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! 
+ */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("; asm block 2\n\tmr %0, r1" : "=g" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "; asm block 3\n" + "\tmr r11, %0\n" + "\tadd r1, r1, r11\n" + "\tadd r30, r30, r11\n" + : /* no outputs */ + : "g" (stsizediff) + : "r11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_unix.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_unix.h new file mode 100644 index 0000000..2b3d307 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_ppc_unix.h @@ -0,0 +1,82 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=g" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + "add 30, 30, 11\n" + : /* no outputs */ + : "g" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. 
+ * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_riscv_unix.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_riscv_unix.h new file mode 100644 index 0000000..5b5ea98 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_riscv_unix.h @@ -0,0 +1,32 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "s0", "s1", "s2", "s3", "s4", "s5", \ + "s6", "s7", "s8", "s9", "s10", "s11", "fs0", "fs1", \ + "fs2", "fs3", "fs4", "fs5", "fs6", "fs7", "fs8", "fs9", \ + "fs10", "fs11" + +static int +slp_switch(void) +{ + register int ret; + register long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mv %0, sp" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add sp, sp, %0\n\t" + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mv %0, zero" : "=r" (ret) : ); + return ret; +} + +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_s390_unix.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_s390_unix.h new file mode 100644 index 0000000..6641854 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_s390_unix.h @@ -0,0 +1,87 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 25-Jan-12 Alexey Borzenkov + * Fixed Linux/S390 port to work correctly with + * different optimization options both on 31-bit + * and 64-bit. Thanks to Stefan Raabe for lots + * of testing. + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 06-Oct-02 Gustavo Niemeyer + * Ported to Linux/S390. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#ifdef __s390x__ +#define STACK_MAGIC 20 /* 20 * 8 = 160 bytes of function call area */ +#else +#define STACK_MAGIC 24 /* 24 * 4 = 96 bytes of function call area */ +#endif + +/* Technically, r11-r13 also need saving, but function prolog starts + with stm(g) and since there are so many saved registers already + it won't be optimized, resulting in all r6-r15 being saved */ +#define REGS_TO_SAVE "r6", "r7", "r8", "r9", "r10", "r14", \ + "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", \ + "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15" + +static int +slp_switch(void) +{ + register int ret; + register long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); +#ifdef __s390x__ + __asm__ volatile ("lgr %0, 15" : "=r" (stackref) : ); +#else + __asm__ volatile ("lr %0, 15" : "=r" (stackref) : ); +#endif + { + SLP_SAVE_STATE(stackref, stsizediff); +/* N.B. + r11 may be used as the frame pointer, and in that case it cannot be + clobbered and needs offsetting just like the stack pointer (but in cases + where frame pointer isn't used we might clobber it accidentally). What's + scary is that r11 is 2nd (and even 1st when GOT is used) callee saved + register that gcc would chose for surviving function calls. However, + since r6-r10 are clobbered above, their cost for reuse is reduced, so + gcc IRA will chose them over r11 (not seeing r11 is implicitly saved), + making it relatively safe to offset in all cases. 
:) */ + __asm__ volatile ( +#ifdef __s390x__ + "agr 15, %0\n\t" + "agr 11, %0" +#else + "ar 15, %0\n\t" + "ar 11, %0" +#endif + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("lhi %0, 0" : "=r" (ret) : ); + return ret; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental and not + * essential yet. + */ diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_sparc_sun_gcc.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_sparc_sun_gcc.h new file mode 100644 index 0000000..652b57f --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_sparc_sun_gcc.h @@ -0,0 +1,92 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 16-May-15 Alexey Borzenkov + * Move stack spilling code inside save/restore functions + * 30-Aug-13 Floris Bruynooghe + Clean the register windows again before returning. + This does not clobber the PIC register as it leaves + the current window intact and is required for multi- + threaded code to work correctly. + * 08-Mar-11 Floris Bruynooghe + * No need to set return value register explicitly + * before the stack and framepointer are adjusted + * as none of the other registers are influenced by + * this. Also don't needlessly clean the windows + * ('ta %0" :: "i" (ST_CLEAN_WINDOWS)') as that + * clobbers the gcc PIC register (%l7). + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to ensure + * that f in slp_eval_frame(PyFrameObject *f) + * gets included into the saved stack area. + * STACK_REFPLUS will probably be 1 in most cases. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * added support for SunOS sparc with gcc + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + + +#define STACK_MAGIC 0 + + +#if defined(__sparcv9) +#define SLP_FLUSHW __asm__ volatile ("flushw") +#else +#define SLP_FLUSHW __asm__ volatile ("ta 3") /* ST_FLUSH_WINDOWS */ +#endif + +/* On sparc we need to spill register windows inside save/restore functions */ +#define SLP_BEFORE_SAVE_STATE() SLP_FLUSHW +#define SLP_BEFORE_RESTORE_STATE() SLP_FLUSHW + + +static int +slp_switch(void) +{ + register int err; + register int *stackref, stsizediff; + + /* Put current stack pointer into stackref. + * Register spilling is done in save/restore. + */ + __asm__ volatile ("mov %%sp, %0" : "=r" (stackref)); + + { + /* Thou shalt put SLP_SAVE_STATE into a local block */ + /* Copy the current stack onto the heap */ + SLP_SAVE_STATE(stackref, stsizediff); + + /* Increment stack and frame pointer by stsizediff */ + __asm__ volatile ( + "add %0, %%sp, %%sp\n\t" + "add %0, %%fp, %%fp" + : : "r" (stsizediff)); + + /* Copy new stack from its save store on the heap */ + SLP_RESTORE_STATE(); + + __asm__ volatile ("mov %1, %0" : "=r" (err) : "i" (0)); + return err; + } +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental and not + * essential yet.
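*/

/* Editor's note, not part of the upstream header: the sparc file above
 * is the only platform header in this tree that defines the optional
 * SLP_BEFORE_SAVE_STATE()/SLP_BEFORE_RESTORE_STATE() hooks, which it
 * uses to flush register windows before the stack is copied.  A sketch
 * of how such optional hooks are conventionally consumed (the actual
 * call sites live in greenlet.c, outside this hunk), assuming the core
 * defaults them to no-ops: */

#ifndef SLP_BEFORE_SAVE_STATE
#  define SLP_BEFORE_SAVE_STATE()     /* default: nothing to do */
#endif
#ifndef SLP_BEFORE_RESTORE_STATE
#  define SLP_BEFORE_RESTORE_STATE()  /* default: nothing to do */
#endif

/* End of the sparc header.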
+ */ diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_x32_unix.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x32_unix.h new file mode 100644 index 0000000..cb14ec1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x32_unix.h @@ -0,0 +1,63 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 17-Aug-12 Fantix King + * Ported from amd64. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "r12", "r13", "r14", "r15" + + +static int +slp_switch(void) +{ + void* ebp; + void* ebx; + unsigned int csr; + unsigned short cw; + register int err; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("stmxcsr %0" : "=m" (csr)); + __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); + __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); + __asm__ ("movl %%esp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addl %0, %%esp\n" + "addl %0, %%ebp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); + __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); + __asm__ volatile ("ldmxcsr %0" : : "m" (csr)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental and not + * essential yet. + */ diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_x64_masm.asm b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x64_masm.asm new file mode 100644 index 0000000..f5c72a2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x64_masm.asm @@ -0,0 +1,111 @@ +; +; stack switching code for MASM on x64 +; Kristjan Valur Jonsson, sept 2005 +; + + +;prototypes for our calls +slp_save_state_asm PROTO +slp_restore_state_asm PROTO + + +pushxmm MACRO reg + sub rsp, 16 + .allocstack 16 + movaps [rsp], reg ; faster than movups, but we must be aligned + ; .savexmm128 reg, offset (don't know what offset is, no documentation) +ENDM +popxmm MACRO reg + movaps reg, [rsp] ; faster than movups, but we must be aligned + add rsp, 16 +ENDM + +pushreg MACRO reg + push reg + .pushreg reg +ENDM +popreg MACRO reg + pop reg +ENDM + + +.code +slp_switch PROC FRAME + ;realign stack to 16 bytes after return address push, makes the following faster + sub rsp,8 + .allocstack 8 + + pushxmm xmm15 + pushxmm xmm14 + pushxmm xmm13 + pushxmm xmm12 + pushxmm xmm11 + pushxmm xmm10 + pushxmm xmm9 + pushxmm xmm8 + pushxmm xmm7 + pushxmm xmm6 + + pushreg r15 + pushreg r14 + pushreg r13 + pushreg r12 + + pushreg rbp + pushreg rbx + pushreg rdi + pushreg rsi + + sub rsp, 10h ;allocate the single function argument (must be multiple of 16) + .allocstack 10h +.endprolog + + lea rcx, [rsp+10h] ;load stack base that we are saving + call slp_save_state_asm ;pass stackpointer, return offset in eax + cmp rax, 1 + je EXIT1 + cmp rax, -1 + je EXIT2 + ;actual stack switch: + add rsp, rax + call slp_restore_state_asm + xor rax, rax ;return 0 + +EXIT: + + add rsp, 10h + popreg rsi + popreg rdi + popreg rbx + popreg rbp + + popreg r12 + popreg r13 + popreg r14 + popreg r15 + + popxmm xmm6 +
popxmm xmm7 + popxmm xmm8 + popxmm xmm9 + popxmm xmm10 + popxmm xmm11 + popxmm xmm12 + popxmm xmm13 + popxmm xmm14 + popxmm xmm15 + + add rsp, 8 + ret + +EXIT1: + mov rax, 1 + jmp EXIT + +EXIT2: + sar rax, 1 + jmp EXIT + +slp_switch ENDP + +END \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_x64_masm.obj b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x64_masm.obj new file mode 100644 index 0000000..64e3e6b Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x64_masm.obj differ diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_x64_msvc.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x64_msvc.h new file mode 100644 index 0000000..601ea56 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x64_msvc.h @@ -0,0 +1,60 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 26-Sep-02 Christian Tismer + * again as a result of virtualized stack access, + * the compiler used less registers. Needed to + * explicit mention registers in order to get them saved. + * Thanks to Jeff Senn for pointing this out and help. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 01-Mar-02 Christian Tismer + * Initial final version after lots of iterations for i386. + */ + +/* Avoid alloca redefined warning on mingw64 */ +#ifndef alloca +#define alloca _alloca +#endif + +#define STACK_REFPLUS 1 +#define STACK_MAGIC 0 + +/* Use the generic support for an external assembly language slp_switch function. */ +#define EXTERNAL_ASM + +#ifdef SLP_EVAL +/* This always uses the external masm assembly file. */ +#endif + +/* + * further self-processing support + */ + +/* we have IsBadReadPtr available, so we can peek at objects */ +/* +#define STACKLESS_SPY + +#ifdef IMPLEMENT_STACKLESSMODULE +#include "Windows.h" +#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) + +static int IS_ON_STACK(void*p) +{ + int stackref; + intptr_t stackbase = ((intptr_t)&stackref) & 0xfffff000; + return (intptr_t)p >= stackbase && (intptr_t)p < stackbase + 0x00100000; +} + +#endif +*/ \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_x86_msvc.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x86_msvc.h new file mode 100644 index 0000000..010a22c --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x86_msvc.h @@ -0,0 +1,88 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 26-Sep-02 Christian Tismer + * again as a result of virtualized stack access, + * the compiler used less registers. Needed to + * explicit mention registers in order to get them saved. + * Thanks to Jeff Senn for pointing this out and help. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. 
Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 01-Mar-02 Christian Tismer + * Initial final version after lots of iterations for i386. + */ + +#define alloca _alloca + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +/* Some magic to quell warnings and keep slp_switch() from crashing when built + with VC90. Disable global optimizations, and the warning: frame pointer + register 'ebp' modified by inline assembly code */ +#pragma optimize("g", off) +#pragma warning(disable:4731) + +static int +slp_switch(void) +{ + void* seh; + register int *stackref, stsizediff; + __asm mov eax, fs:[0] + __asm mov [seh], eax + __asm mov stackref, esp; + /* modify EBX, ESI and EDI in order to get them preserved */ + __asm mov ebx, ebx; + __asm xchg esi, edi; + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm { + mov eax, stsizediff + add esp, eax + add ebp, eax + } + SLP_RESTORE_STATE(); + } + __asm mov eax, [seh] + __asm mov fs:[0], eax + return 0; +} + +/* re-enable ebp warning and global optimizations. */ +#pragma optimize("g", on) +#pragma warning(default:4731) + +#endif + +/* + * further self-processing support + */ + +/* we have IsBadReadPtr available, so we can peek at objects */ +#define STACKLESS_SPY + +#ifdef IMPLEMENT_STACKLESSMODULE +#include "Windows.h" +#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) + +static int IS_ON_STACK(void*p) +{ + int stackref; + int stackbase = ((int)&stackref) & 0xfffff000; + return (int)p >= stackbase && (int)p < stackbase + 0x00100000; +} + +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/platform/switch_x86_unix.h b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x86_unix.h new file mode 100644 index 0000000..3a95186 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/platform/switch_x86_unix.h @@ -0,0 +1,105 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 3-May-13 Ralf Schmitt + * Add support for strange GCC caller-save decisions + * (ported from switch_aarch64_gcc.h) + * 19-Aug-11 Alexey Borzenkov + * Correctly save ebp, ebx and cw + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'ebx' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to ensure + * that f in slp_eval_frame(PyFrameObject *f) + * gets included into the saved stack area. + * STACK_REFPLUS will probably be 1 in most cases. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 31-Apr-02 Armin Rigo + * Added ebx, esi and edi register-saves. + * 01-Mar-02 Samuel M. Rushing + * Ported from i386.
+ */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +/* #define STACK_MAGIC 3 */ +/* the above works fine with gcc 2.96, but 2.95.3 wants this */ +#define STACK_MAGIC 0 + +#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) +# define ATTR_NOCLONE __attribute__((noclone)) +#else +# define ATTR_NOCLONE +#endif + +static int +slp_switch(void) +{ + int err; +#ifdef _WIN32 + void *seh; +#endif + void *ebp, *ebx; + unsigned short cw; + register int *stackref, stsizediff; + __asm__ volatile ("" : : : "esi", "edi"); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); + __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); +#ifdef _WIN32 + __asm__ volatile ( + "movl %%fs:0x0, %%eax\n" + "movl %%eax, %0\n" + : "=m" (seh) + : + : "eax"); +#endif + __asm__ ("movl %%esp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addl %0, %%esp\n" + "addl %0, %%ebp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); + } +#ifdef _WIN32 + __asm__ volatile ( + "movl %0, %%eax\n" + "movl %%eax, %%fs:0x0\n" + : + : "m" (seh) + : "eax"); +#endif + __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); + __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : "esi", "edi"); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental and not + * essential yet. + */ diff --git a/venv/lib/python3.10/site-packages/greenlet/slp_platformselect.h b/venv/lib/python3.10/site-packages/greenlet/slp_platformselect.h new file mode 100644 index 0000000..b5e8eb6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/slp_platformselect.h @@ -0,0 +1,58 @@ +/* + * Platform Selection for Stackless Python + */ + +#if defined(MS_WIN32) && !defined(MS_WIN64) && defined(_M_IX86) && defined(_MSC_VER) +#include "platform/switch_x86_msvc.h" /* MS Visual Studio on X86 */ +#elif defined(MS_WIN64) && defined(_M_X64) && defined(_MSC_VER) || defined(__MINGW64__) +#include "platform/switch_x64_msvc.h" /* MS Visual Studio on X64 */ +#elif defined(__GNUC__) && defined(__amd64__) && defined(__ILP32__) +#include "platform/switch_x32_unix.h" /* gcc on amd64 with x32 ABI */ +#elif defined(__GNUC__) && defined(__amd64__) +#include "platform/switch_amd64_unix.h" /* gcc on amd64 */ +#elif defined(__GNUC__) && defined(__i386__) +#include "platform/switch_x86_unix.h" /* gcc on X86 */ +#elif defined(__GNUC__) && defined(__powerpc64__) && (defined(__linux__) || defined(__FreeBSD__)) +#include "platform/switch_ppc64_linux.h" /* gcc on PowerPC 64-bit */ +#elif defined(__GNUC__) && defined(__PPC__) && (defined(__linux__) || defined(__FreeBSD__)) +#include "platform/switch_ppc_linux.h" /* gcc on PowerPC */ +#elif defined(__GNUC__) && defined(__ppc__) && defined(__APPLE__) +#include "platform/switch_ppc_macosx.h" /* Apple MacOS X on PowerPC */ +#elif defined(__GNUC__) && defined(__powerpc64__) && defined(_AIX) +#include "platform/switch_ppc64_aix.h" /* gcc on AIX/PowerPC 64-bit */ +#elif defined(__GNUC__) && defined(_ARCH_PPC) && defined(_AIX) +#include "platform/switch_ppc_aix.h" /* gcc on AIX/PowerPC */ +#elif defined(__GNUC__) && defined(sparc) +#include "platform/switch_sparc_sun_gcc.h" /* SunOS sparc with gcc */ +#elif defined(__SUNPRO_C) && defined(sparc) && defined(sun)
+#include "platform/switch_sparc_sun_gcc.h" /* SunStudio on SPARC */ +#elif defined(__SUNPRO_C) && defined(__amd64__) && defined(sun) +#include "platform/switch_amd64_unix.h" /* SunStudio on amd64 */ +#elif defined(__SUNPRO_C) && defined(__i386__) && defined(sun) +#include "platform/switch_x86_unix.h" /* SunStudio on x86 */ +#elif defined(__GNUC__) && defined(__s390__) && defined(__linux__) +#include "platform/switch_s390_unix.h" /* Linux/S390 */ +#elif defined(__GNUC__) && defined(__s390x__) && defined(__linux__) +#include "platform/switch_s390_unix.h" /* Linux/S390 zSeries (64-bit) */ +#elif defined(__GNUC__) && defined(__arm__) +#ifdef __APPLE__ +#include <TargetConditionals.h> +#endif +#if TARGET_OS_IPHONE +#include "platform/switch_arm32_ios.h" /* iPhone OS on arm32 */ +#else +#include "platform/switch_arm32_gcc.h" /* gcc using arm32 */ +#endif +#elif defined(__GNUC__) && defined(__mips__) && defined(__linux__) +#include "platform/switch_mips_unix.h" /* Linux/MIPS */ +#elif defined(__GNUC__) && defined(__aarch64__) +#include "platform/switch_aarch64_gcc.h" /* Aarch64 ABI */ +#elif defined(__GNUC__) && defined(__mc68000__) +#include "platform/switch_m68k_gcc.h" /* gcc on m68k */ +#elif defined(__GNUC__) && defined(__csky__) +#include "platform/switch_csky_gcc.h" /* gcc on csky */ +#elif defined(__GNUC__) && defined(__riscv) +#include "platform/switch_riscv_unix.h" /* gcc on RISC-V */ +#elif defined(__GNUC__) && defined(__alpha__) +#include "platform/switch_alpha_unix.h" /* gcc on DEC Alpha */ +#endif diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__init__.py b/venv/lib/python3.10/site-packages/greenlet/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..867fdf3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-310.pyc new file mode 100644 index 0000000..a5dc895 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-310.pyc new file mode 100644 index 0000000..d324c3c Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-310.pyc new file mode 100644 index 0000000..a7137a3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_gc.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_gc.cpython-310.pyc new file mode 100644 index 0000000..749b033 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_gc.cpython-310.pyc differ diff --git
a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_generator.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_generator.cpython-310.pyc new file mode 100644 index 0000000..6aeaab1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_generator.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-310.pyc new file mode 100644 index 0000000..851243d Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-310.pyc new file mode 100644 index 0000000..c5cd5a9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-310.pyc new file mode 100644 index 0000000..725a777 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-310.pyc new file mode 100644 index 0000000..ee8c5c5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_throw.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_throw.cpython-310.pyc new file mode 100644 index 0000000..d319364 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_throw.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-310.pyc new file mode 100644 index 0000000..f809540 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_version.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_version.cpython-310.pyc new file mode 100644 index 0000000..51597c2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-310.pyc b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-310.pyc new file mode 100644 index 0000000..83cee60 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension.c b/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension.c new file mode 100644 index 0000000..4fe087d --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension.c @@ -0,0 +1,216 @@ +/* This is a set of functions used by test_extension_interface.py to test the + * Greenlet C API. + */ + +#include "../greenlet.h" + +#ifndef Py_RETURN_NONE +# define Py_RETURN_NONE return Py_INCREF(Py_None), Py_None +#endif + +#define TEST_MODULE_NAME "_test_extension" + +static PyObject* +test_switch(PyObject* self, PyObject* greenlet) +{ + PyObject* result = NULL; + + if (greenlet == NULL || !PyGreenlet_Check(greenlet)) { + PyErr_BadArgument(); + return NULL; + } + + result = PyGreenlet_Switch((PyGreenlet*)greenlet, NULL, NULL); + if (result == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_AssertionError, + "greenlet.switch() failed for some reason."); + } + return NULL; + } + Py_INCREF(result); + return result; +} + +static PyObject* +test_switch_kwargs(PyObject* self, PyObject* args, PyObject* kwargs) +{ + PyGreenlet* g = NULL; + PyObject* result = NULL; + + PyArg_ParseTuple(args, "O!", &PyGreenlet_Type, &g); + + if (g == NULL || !PyGreenlet_Check(g)) { + PyErr_BadArgument(); + return NULL; + } + + result = PyGreenlet_Switch(g, NULL, kwargs); + if (result == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_AssertionError, + "greenlet.switch() failed for some reason."); + } + return NULL; + } + Py_XINCREF(result); + return result; +} + +static PyObject* +test_getcurrent(PyObject* self) +{ + PyGreenlet* g = PyGreenlet_GetCurrent(); + if (g == NULL || !PyGreenlet_Check(g) || !PyGreenlet_ACTIVE(g)) { + PyErr_SetString(PyExc_AssertionError, + "getcurrent() returned an invalid greenlet"); + Py_XDECREF(g); + return NULL; + } + Py_DECREF(g); + Py_RETURN_NONE; +} + +static PyObject* +test_setparent(PyObject* self, PyObject* arg) +{ + PyGreenlet* current; + PyGreenlet* greenlet = NULL; + + if (arg == NULL || !PyGreenlet_Check(arg)) { + PyErr_BadArgument(); + return NULL; + } + if ((current = PyGreenlet_GetCurrent()) == NULL) { + return NULL; + } + greenlet = (PyGreenlet*)arg; + if (PyGreenlet_SetParent(greenlet, current)) { + Py_DECREF(current); + return NULL; + } + Py_DECREF(current); + if (PyGreenlet_Switch(greenlet, NULL, NULL) == NULL) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject* +test_new_greenlet(PyObject* self, PyObject* callable) +{ + PyObject* result = NULL; + PyGreenlet* greenlet = PyGreenlet_New(callable, NULL); + + if (!greenlet) { + return NULL; + } + + result = PyGreenlet_Switch(greenlet, NULL, NULL); + if (result == NULL) { + return NULL; + } + + Py_INCREF(result); + return result; +} + +static PyObject* +test_raise_dead_greenlet(PyObject* self) +{ + PyErr_SetString(PyExc_GreenletExit, "test GreenletExit exception."); + return NULL; +} + +static PyObject* +test_raise_greenlet_error(PyObject* self) +{ + PyErr_SetString(PyExc_GreenletError, "test greenlet.error exception"); + return NULL; +} + +static PyObject* +test_throw(PyObject* self, PyGreenlet* g) +{ + const char msg[] = "take that sucka!"; + PyObject* msg_obj = Py_BuildValue("s", msg); + PyGreenlet_Throw(g, PyExc_ValueError, msg_obj, NULL); + Py_DECREF(msg_obj); + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_switch", + (PyCFunction)test_switch, + METH_O, + "Switch to the provided greenlet sending provided arguments, and \n" + "return the results."}, + {"test_switch_kwargs", + (PyCFunction)test_switch_kwargs, + METH_VARARGS | METH_KEYWORDS, + "Switch to the provided greenlet sending the provided keyword args."}, + {"test_getcurrent", + (PyCFunction)test_getcurrent, + 
METH_NOARGS, + "Test PyGreenlet_GetCurrent()"}, + {"test_setparent", + (PyCFunction)test_setparent, + METH_O, + "Set the parent of the provided greenlet and switch to it."}, + {"test_new_greenlet", + (PyCFunction)test_new_greenlet, + METH_O, + "Test PyGreenlet_New()"}, + {"test_raise_dead_greenlet", + (PyCFunction)test_raise_dead_greenlet, + METH_NOARGS, + "Just raise greenlet.GreenletExit"}, + {"test_raise_greenlet_error", + (PyCFunction)test_raise_greenlet_error, + METH_NOARGS, + "Just raise greenlet.error"}, + {"test_throw", + (PyCFunction)test_throw, + METH_O, + "Throw a ValueError at the provided greenlet"}, + {NULL, NULL, 0, NULL}}; + +#if PY_MAJOR_VERSION >= 3 +# define INITERROR return NULL + +static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, + TEST_MODULE_NAME, + NULL, + 0, + test_methods, + NULL, + NULL, + NULL, + NULL}; + +PyMODINIT_FUNC +PyInit__test_extension(void) +#else +# define INITERROR return +PyMODINIT_FUNC +init_test_extension(void) +#endif +{ + PyObject* module = NULL; + +#if PY_MAJOR_VERSION >= 3 + module = PyModule_Create(&moduledef); +#else + module = Py_InitModule(TEST_MODULE_NAME, test_methods); +#endif + + if (module == NULL) { + INITERROR; + } + + PyGreenlet_Import(); + +#if PY_MAJOR_VERSION >= 3 + return module; +#endif +} diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension.cpython-310-x86_64-linux-gnu.so new file mode 100755 index 0000000..d5a9384 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension.cpython-310-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension_cpp.cpp b/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension_cpp.cpp new file mode 100644 index 0000000..72e3d81 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension_cpp.cpp @@ -0,0 +1,121 @@ +/* This is a set of functions used to test C++ exceptions are not + * broken during greenlet switches + */ + +#include "../greenlet.h" + +struct exception_t { + int depth; + exception_t(int depth) : depth(depth) {} +}; + +/* Functions are called via pointers to prevent inlining */ +static void (*p_test_exception_throw)(int depth); +static PyObject* (*p_test_exception_switch_recurse)(int depth, int left); + +static void +test_exception_throw(int depth) +{ + throw exception_t(depth); +} + +static PyObject* +test_exception_switch_recurse(int depth, int left) +{ + if (left > 0) { + return p_test_exception_switch_recurse(depth, left - 1); + } + + PyObject* result = NULL; + PyGreenlet* self = PyGreenlet_GetCurrent(); + if (self == NULL) + return NULL; + + try { + PyGreenlet_Switch(self->parent, NULL, NULL); + p_test_exception_throw(depth); + PyErr_SetString(PyExc_RuntimeError, + "throwing C++ exception didn't work"); + } + catch (exception_t& e) { + if (e.depth != depth) + PyErr_SetString(PyExc_AssertionError, "depth mismatch"); + else + result = PyLong_FromLong(depth); + } + catch (...)
{ + PyErr_SetString(PyExc_RuntimeError, "unexpected C++ exception"); + } + + Py_DECREF(self); + return result; +} + +/* test_exception_switch(int depth) + * - recurses depth times + * - switches to parent inside try/catch block + * - throws an exception that (expected to be caught in the same function) + * - verifies depth matches (exceptions shouldn't be caught in other greenlets) + */ +static PyObject* +test_exception_switch(PyObject* self, PyObject* args) +{ + int depth; + if (!PyArg_ParseTuple(args, "i", &depth)) + return NULL; + return p_test_exception_switch_recurse(depth, depth); +} + +static PyMethodDef test_methods[] = { + {"test_exception_switch", + (PyCFunction)&test_exception_switch, + METH_VARARGS, + "Switches to parent twice, to test exception handling and greenlet " + "switching."}, + {NULL, NULL, 0, NULL}}; + +#if PY_MAJOR_VERSION >= 3 +# define INITERROR return NULL + +static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, + "greenlet.tests._test_extension_cpp", + NULL, + 0, + test_methods, + NULL, + NULL, + NULL, + NULL}; + +PyMODINIT_FUNC +PyInit__test_extension_cpp(void) +#else +# define INITERROR return +PyMODINIT_FUNC +init_test_extension_cpp(void) +#endif +{ + PyObject* module = NULL; + +#if PY_MAJOR_VERSION >= 3 + module = PyModule_Create(&moduledef); +#else + module = Py_InitModule("greenlet.tests._test_extension_cpp", test_methods); +#endif + + if (module == NULL) { + INITERROR; + } + + PyGreenlet_Import(); + if (_PyGreenlet_API == NULL) { + INITERROR; + } + + p_test_exception_throw = test_exception_throw; + p_test_exception_switch_recurse = test_exception_switch_recurse; + +#if PY_MAJOR_VERSION >= 3 + return module; +#endif +} diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension_cpp.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension_cpp.cpython-310-x86_64-linux-gnu.so new file mode 100755 index 0000000..967cd40 Binary files /dev/null and b/venv/lib/python3.10/site-packages/greenlet/tests/_test_extension_cpp.cpython-310-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_contextvars.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_contextvars.py new file mode 100644 index 0000000..49b7c0d --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_contextvars.py @@ -0,0 +1,266 @@ +import unittest +import gc +import sys + +from functools import partial + +from greenlet import greenlet +from greenlet import getcurrent + + +try: + from contextvars import Context + from contextvars import ContextVar + from contextvars import copy_context +except ImportError: + Context = ContextVar = copy_context = None + +# We don't support testing if greenlet's built-in context var support is disabled. 
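For illustration before the context-variable tests that follow, here is a minimal sketch (not part of this commit) of the behaviour they exercise: giving a greenlet its own gr_context snapshot isolates ContextVar writes from the main greenlet. It assumes Python 3.7+ and a greenlet build with context-var support.

from contextvars import ContextVar, copy_context
from greenlet import greenlet, getcurrent

var = ContextVar('var', default='main')

def child():
    var.set('child')                       # mutates only the child's context
    getcurrent().parent.switch(var.get())

g = greenlet(child)
g.gr_context = copy_context()              # hand the child its own snapshot
print(g.switch())                          # 'child'
print(var.get())                           # 'main' -- unchanged here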
+@unittest.skipUnless(Context is not None, "ContextVar not supported") +class ContextVarsTests(unittest.TestCase): + def _new_ctx_run(self, *args, **kwargs): + return copy_context().run(*args, **kwargs) + + def _increment(self, greenlet_id, ctx_var, callback, counts, expect): + if expect is None: + self.assertIsNone(ctx_var.get()) + else: + self.assertEqual(ctx_var.get(), expect) + ctx_var.set(greenlet_id) + for _ in range(2): + counts[ctx_var.get()] += 1 + callback() + + def _test_context(self, propagate_by): + id_var = ContextVar("id", default=None) + id_var.set(0) + + callback = getcurrent().switch + counts = dict((i, 0) for i in range(5)) + + lets = [ + greenlet(partial( + partial( + copy_context().run, + self._increment + ) if propagate_by == "run" else self._increment, + greenlet_id=i, + ctx_var=id_var, + callback=callback, + counts=counts, + expect=( + i - 1 if propagate_by == "share" else + 0 if propagate_by in ("set", "run") else None + ) + )) + for i in range(1, 5) + ] + + for let in lets: + if propagate_by == "set": + let.gr_context = copy_context() + elif propagate_by == "share": + let.gr_context = getcurrent().gr_context + + for i in range(2): + counts[id_var.get()] += 1 + for let in lets: + let.switch() + + if propagate_by == "run": + # Must leave each context.run() in reverse order of entry + for let in reversed(lets): + let.switch() + else: + # No context.run(), so fine to exit in any order. + for let in lets: + let.switch() + + for let in lets: + self.assertTrue(let.dead) + # When using run(), we leave the run() as the greenlet dies, + # and there's no context "underneath". When not using run(), + # gr_context still reflects the context the greenlet was + # running in. + self.assertEqual(let.gr_context is None, propagate_by == "run") + + if propagate_by == "share": + self.assertEqual(counts, {0: 1, 1: 1, 2: 1, 3: 1, 4: 6}) + else: + self.assertEqual(set(counts.values()), set([2])) + + def test_context_propagated_by_context_run(self): + self._new_ctx_run(self._test_context, "run") + + def test_context_propagated_by_setting_attribute(self): + self._new_ctx_run(self._test_context, "set") + + def test_context_not_propagated(self): + self._new_ctx_run(self._test_context, None) + + def test_context_shared(self): + self._new_ctx_run(self._test_context, "share") + + def test_break_ctxvars(self): + let1 = greenlet(copy_context().run) + let2 = greenlet(copy_context().run) + let1.switch(getcurrent().switch) + let2.switch(getcurrent().switch) + # Since let2 entered the current context and let1 exits its own, the + # interpreter emits: + # RuntimeError: cannot exit context: thread state references a different context object + let1.switch() + + def test_not_broken_if_using_attribute_instead_of_context_run(self): + let1 = greenlet(getcurrent().switch) + let2 = greenlet(getcurrent().switch) + let1.gr_context = copy_context() + let2.gr_context = copy_context() + let1.switch() + let2.switch() + let1.switch() + let2.switch() + + def test_context_assignment_while_running(self): + id_var = ContextVar("id", default=None) + + def target(): + self.assertIsNone(id_var.get()) + self.assertIsNone(gr.gr_context) + + # Context is created on first use + id_var.set(1) + self.assertIsInstance(gr.gr_context, Context) + self.assertEqual(id_var.get(), 1) + self.assertEqual(gr.gr_context[id_var], 1) + + # Clearing the context makes it get re-created as another + # empty context when next used + old_context = gr.gr_context + gr.gr_context = None # assign None while running + self.assertIsNone(id_var.get()) + 
self.assertIsNone(gr.gr_context) + id_var.set(2) + self.assertIsInstance(gr.gr_context, Context) + self.assertEqual(id_var.get(), 2) + self.assertEqual(gr.gr_context[id_var], 2) + + new_context = gr.gr_context + getcurrent().parent.switch((old_context, new_context)) + # parent switches us back to old_context + + self.assertEqual(id_var.get(), 1) + gr.gr_context = new_context # assign non-None while running + self.assertEqual(id_var.get(), 2) + + getcurrent().parent.switch() + # parent switches us back to no context + self.assertIsNone(id_var.get()) + self.assertIsNone(gr.gr_context) + gr.gr_context = old_context + self.assertEqual(id_var.get(), 1) + + getcurrent().parent.switch() + # parent switches us back to no context + self.assertIsNone(id_var.get()) + self.assertIsNone(gr.gr_context) + + gr = greenlet(target) + + with self.assertRaisesRegex(AttributeError, "can't delete attr"): + del gr.gr_context + + self.assertIsNone(gr.gr_context) + old_context, new_context = gr.switch() + self.assertIs(new_context, gr.gr_context) + self.assertEqual(old_context[id_var], 1) + self.assertEqual(new_context[id_var], 2) + self.assertEqual(new_context.run(id_var.get), 2) + gr.gr_context = old_context # assign non-None while suspended + gr.switch() + self.assertIs(gr.gr_context, new_context) + gr.gr_context = None # assign None while suspended + gr.switch() + self.assertIs(gr.gr_context, old_context) + gr.gr_context = None + gr.switch() + self.assertIsNone(gr.gr_context) + + # Make sure there are no reference leaks + gr = None + gc.collect() + self.assertEqual(sys.getrefcount(old_context), 2) + self.assertEqual(sys.getrefcount(new_context), 2) + + def test_context_assignment_different_thread(self): + import threading + + ctx = Context() + var = ContextVar("var", default=None) + is_running = threading.Event() + should_suspend = threading.Event() + did_suspend = threading.Event() + should_exit = threading.Event() + holder = [] + + def greenlet_in_thread_fn(): + var.set(1) + is_running.set() + should_suspend.wait() + var.set(2) + getcurrent().parent.switch() + holder.append(var.get()) + + def thread_fn(): + gr = greenlet(greenlet_in_thread_fn) + gr.gr_context = ctx + holder.append(gr) + gr.switch() + did_suspend.set() + should_exit.wait() + gr.switch() + + thread = threading.Thread(target=thread_fn, daemon=True) + thread.start() + is_running.wait() + gr = holder[0] + + # Can't access or modify context if the greenlet is running + # in a different thread + with self.assertRaisesRegex(ValueError, "running in a different"): + getattr(gr, 'gr_context') + with self.assertRaisesRegex(ValueError, "running in a different"): + gr.gr_context = None + + should_suspend.set() + did_suspend.wait() + + # OK to access and modify context if greenlet is suspended + self.assertIs(gr.gr_context, ctx) + self.assertEqual(gr.gr_context[var], 2) + gr.gr_context = None + + should_exit.set() + thread.join() + + self.assertEqual(holder, [gr, None]) + + # Context can still be accessed/modified when greenlet is dead: + self.assertIsNone(gr.gr_context) + gr.gr_context = ctx + self.assertIs(gr.gr_context, ctx) + +@unittest.skipIf(Context is not None, "ContextVar supported") +class NoContextVarsTests(unittest.TestCase): + def test_contextvars_errors(self): + let1 = greenlet(getcurrent().switch) + self.assertFalse(hasattr(let1, 'gr_context')) + with self.assertRaises(AttributeError): + getattr(let1, 'gr_context') + with self.assertRaises(AttributeError): + let1.gr_context = None + let1.switch() + with self.assertRaises(AttributeError): + 
getattr(let1, 'gr_context') + with self.assertRaises(AttributeError): + let1.gr_context = None diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_cpp.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_cpp.py new file mode 100644 index 0000000..741ea10 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_cpp.py @@ -0,0 +1,18 @@ +from __future__ import print_function +from __future__ import absolute_import + +import unittest + +import greenlet +from . import _test_extension_cpp + + +class CPPTests(unittest.TestCase): + def test_exception_switch(self): + greenlets = [] + for i in range(4): + g = greenlet.greenlet(_test_extension_cpp.test_exception_switch) + g.switch(i) + greenlets.append(g) + for i, g in enumerate(greenlets): + self.assertEqual(g.switch(), i) diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_extension_interface.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_extension_interface.py new file mode 100644 index 0000000..a92ea1f --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_extension_interface.py @@ -0,0 +1,77 @@ +from __future__ import print_function +from __future__ import absolute_import + +import sys +import unittest + +import greenlet +from . import _test_extension + + +class CAPITests(unittest.TestCase): + def test_switch(self): + self.assertEqual( + 50, _test_extension.test_switch(greenlet.greenlet(lambda: 50))) + + def test_switch_kwargs(self): + def foo(x, y): + return x * y + g = greenlet.greenlet(foo) + self.assertEqual(6, _test_extension.test_switch_kwargs(g, x=3, y=2)) + + def test_setparent(self): + def foo(): + def bar(): + greenlet.getcurrent().parent.switch() + + # This final switch should go back to the main greenlet, since + # the test_setparent() function in the C extension should have + # reparented this greenlet. 
+ greenlet.getcurrent().parent.switch() + raise AssertionError("Should never have reached this code") + child = greenlet.greenlet(bar) + child.switch() + greenlet.getcurrent().parent.switch(child) + greenlet.getcurrent().parent.throw( + AssertionError("Should never reach this code")) + foo_child = greenlet.greenlet(foo).switch() + self.assertEqual(None, _test_extension.test_setparent(foo_child)) + + def test_getcurrent(self): + _test_extension.test_getcurrent() + + def test_new_greenlet(self): + self.assertEqual(-15, _test_extension.test_new_greenlet(lambda: -15)) + + def test_raise_greenlet_dead(self): + self.assertRaises( + greenlet.GreenletExit, _test_extension.test_raise_dead_greenlet) + + def test_raise_greenlet_error(self): + self.assertRaises( + greenlet.error, _test_extension.test_raise_greenlet_error) + + def test_throw(self): + seen = [] + + def foo(): + try: + greenlet.getcurrent().parent.switch() + except ValueError: + seen.append(sys.exc_info()[1]) + except greenlet.GreenletExit: + raise AssertionError + g = greenlet.greenlet(foo) + g.switch() + _test_extension.test_throw(g) + self.assertEqual(len(seen), 1) + self.assertTrue( + isinstance(seen[0], ValueError), + "ValueError was not raised in foo()") + self.assertEqual( + str(seen[0]), + 'take that sucka!', + "message doesn't match") + +if __name__ == '__main__': + unittest.main() diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_gc.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_gc.py new file mode 100644 index 0000000..a2a41ca --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_gc.py @@ -0,0 +1,77 @@ +import gc +import sys +import unittest +import weakref + +import greenlet + + +class GCTests(unittest.TestCase): + def test_dead_circular_ref(self): + o = weakref.ref(greenlet.greenlet(greenlet.getcurrent).switch()) + gc.collect() + self.assertTrue(o() is None) + self.assertFalse(gc.garbage, gc.garbage) + + if greenlet.GREENLET_USE_GC: + # These only work with greenlet gc support + + def test_circular_greenlet(self): + class circular_greenlet(greenlet.greenlet): + pass + o = circular_greenlet() + o.self = o + o = weakref.ref(o) + gc.collect() + self.assertTrue(o() is None) + self.assertFalse(gc.garbage, gc.garbage) + + def test_inactive_ref(self): + class inactive_greenlet(greenlet.greenlet): + def __init__(self): + greenlet.greenlet.__init__(self, run=self.run) + + def run(self): + pass + o = inactive_greenlet() + o = weakref.ref(o) + gc.collect() + self.assertTrue(o() is None) + self.assertFalse(gc.garbage, gc.garbage) + + def test_finalizer_crash(self): + # This test is designed to crash when active greenlets + # are made garbage collectable, until the underlying + # problem is resolved. 
How does it work: + # - order of object creation is important + # - array is created first, so it is moved to unreachable first + # - we create a cycle between a greenlet and this array + # - we create an object that participates in gc, is only + # referenced by a greenlet, and would corrupt gc lists + # on destruction, the easiest is to use an object with + # a finalizer + # - because array is the first object in unreachable it is + # cleared first, which causes all references to greenlet + # to disappear and causes greenlet to be destroyed, but since + # it is still live it causes a switch during gc, which causes + # an object with finalizer to be destroyed, which causes stack + # corruption and then a crash + class object_with_finalizer(object): + def __del__(self): + pass + array = [] + parent = greenlet.getcurrent() + def greenlet_body(): + greenlet.getcurrent().object = object_with_finalizer() + try: + parent.switch() + finally: + del greenlet.getcurrent().object + g = greenlet.greenlet(greenlet_body) + g.array = array + array.append(g) + g.switch() + del array + del g + greenlet.getcurrent() + gc.collect() diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_generator.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_generator.py new file mode 100644 index 0000000..62f9f26 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_generator.py @@ -0,0 +1,59 @@ +import unittest +from greenlet import greenlet + + +class genlet(greenlet): + + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + + def run(self): + fn, = self.fn + fn(*self.args, **self.kwds) + + def __iter__(self): + return self + + def __next__(self): + self.parent = greenlet.getcurrent() + result = self.switch() + if self: + return result + else: + raise StopIteration + + # Hack: Python < 2.6 compatibility + next = __next__ + + +def Yield(value): + g = greenlet.getcurrent() + while not isinstance(g, genlet): + if g is None: + raise RuntimeError('yield outside a genlet') + g = g.parent + g.parent.switch(value) + + +def generator(func): + class generator(genlet): + fn = (func,) + return generator + +# ____________________________________________________________ + + +class GeneratorTests(unittest.TestCase): + def test_generator(self): + seen = [] + + def g(n): + for i in range(n): + seen.append(i) + Yield(i) + g = generator(g) + for k in range(3): + for j in g(5): + seen.append(j) + self.assertEqual(seen, 3 * [0, 0, 1, 1, 2, 2, 3, 3, 4, 4]) diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_generator_nested.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_generator_nested.py new file mode 100644 index 0000000..6b4f023 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_generator_nested.py @@ -0,0 +1,165 @@ +import unittest +from greenlet import greenlet + + +class genlet(greenlet): + + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + self.child = None + + def run(self): + fn, = self.fn + fn(*self.args, **self.kwds) + + def __iter__(self): + return self + + def set_child(self, child): + self.child = child + + def __next__(self): + if self.child: + child = self.child + while child.child: + tmp = child + child = child.child + tmp.child = None + + result = child.switch() + else: + self.parent = greenlet.getcurrent() + result = self.switch() + + if self: + return result + else: + raise StopIteration + + # Hack: Python < 2.6 compatibility + next = __next__ + + +def Yield(value, level=1): + g = 
greenlet.getcurrent() + + while level != 0: + if not isinstance(g, genlet): + raise RuntimeError('yield outside a genlet') + if level > 1: + g.parent.set_child(g) + g = g.parent + level -= 1 + + g.switch(value) + + +def Genlet(func): + class Genlet(genlet): + fn = (func,) + return Genlet + +# ____________________________________________________________ + + +def g1(n, seen): + for i in range(n): + seen.append(i + 1) + yield i + + +def g2(n, seen): + for i in range(n): + seen.append(i + 1) + Yield(i) + +g2 = Genlet(g2) + + +def nested(i): + Yield(i) + + +def g3(n, seen): + for i in range(n): + seen.append(i + 1) + nested(i) +g3 = Genlet(g3) + + +def a(n): + if n == 0: + return + for ii in ax(n - 1): + Yield(ii) + Yield(n) +ax = Genlet(a) + + +def perms(l): + if len(l) > 1: + for e in l: + # No syntactical sugar for generator expressions + [Yield([e] + p) for p in perms([x for x in l if x != e])] + else: + Yield(l) +perms = Genlet(perms) + + +def gr1(n): + for ii in range(1, n): + Yield(ii) + Yield(ii * ii, 2) + +gr1 = Genlet(gr1) + + +def gr2(n, seen): + for ii in gr1(n): + seen.append(ii) + +gr2 = Genlet(gr2) + + +class NestedGeneratorTests(unittest.TestCase): + def test_layered_genlets(self): + seen = [] + for ii in gr2(5, seen): + seen.append(ii) + self.assertEqual(seen, [1, 1, 2, 4, 3, 9, 4, 16]) + + def test_permutations(self): + gen_perms = perms(list(range(4))) + permutations = list(gen_perms) + self.assertEqual(len(permutations), 4 * 3 * 2 * 1) + self.assertTrue([0, 1, 2, 3] in permutations) + self.assertTrue([3, 2, 1, 0] in permutations) + res = [] + for ii in zip(perms(list(range(4))), perms(list(range(3)))): + res.append(ii) + self.assertEqual( + res, + [([0, 1, 2, 3], [0, 1, 2]), ([0, 1, 3, 2], [0, 2, 1]), + ([0, 2, 1, 3], [1, 0, 2]), ([0, 2, 3, 1], [1, 2, 0]), + ([0, 3, 1, 2], [2, 0, 1]), ([0, 3, 2, 1], [2, 1, 0])]) + # XXX Test to make sure we are working as a generator expression + + def test_genlet_simple(self): + for g in [g1, g2, g3]: + seen = [] + for k in range(3): + for j in g(5, seen): + seen.append(j) + self.assertEqual(seen, 3 * [1, 0, 2, 1, 3, 2, 4, 3, 5, 4]) + + def test_genlet_bad(self): + try: + Yield(10) + except RuntimeError: + pass + + def test_nested_genlets(self): + seen = [] + for ii in ax(5): + seen.append(ii) diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_greenlet.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_greenlet.py new file mode 100644 index 0000000..5509a8b --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_greenlet.py @@ -0,0 +1,728 @@ +import gc +import sys +import time +import threading +import unittest +from abc import ABCMeta, abstractmethod + +from greenlet import greenlet + +# We manually manage locks in many tests +# pylint:disable=consider-using-with + +class SomeError(Exception): + pass + + +def fmain(seen): + try: + greenlet.getcurrent().parent.switch() + except: + seen.append(sys.exc_info()[0]) + raise + raise SomeError + + +def send_exception(g, exc): + # note: send_exception(g, exc) can now be done with g.throw(exc). + # the purpose of this test is to explicitly check the propagation rules.
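A quick sketch (not part of this commit) of the equivalence the note above mentions: g.throw(exc) resumes the suspended greenlet with exc raised at its last switch point, and if the greenlet dies with that exception it propagates to the switching caller, which is exactly what routing the exception through the crasher child below achieves.

import greenlet

def victim():
    try:
        greenlet.getcurrent().parent.switch()   # suspend here
    except KeyError:
        print('raised inside victim')
        raise                                    # victim dies with KeyError

g = greenlet.greenlet(victim)
g.switch()                 # run victim until it suspends
try:
    g.throw(KeyError)      # equivalent to the crasher approach below
except KeyError:
    print('propagated to the caller')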
+ def crasher(exc): + raise exc + g1 = greenlet(crasher, parent=g) + g1.switch(exc) + + +class TestGreenlet(unittest.TestCase): + def test_simple(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + lst.append(3) + g = greenlet(f) + lst.append(0) + g.switch() + lst.append(2) + g.switch() + lst.append(4) + self.assertEqual(lst, list(range(5))) + + def test_parent_equals_None(self): + g = greenlet(parent=None) + self.assertIsNotNone(g) + self.assertIs(g.parent, greenlet.getcurrent()) + + def test_run_equals_None(self): + g = greenlet(run=None) + self.assertIsNotNone(g) + self.assertIsNone(g.run) + + def test_two_children(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + lst.extend([1, 1]) + g = greenlet(f) + h = greenlet(f) + g.switch() + self.assertEqual(len(lst), 1) + h.switch() + self.assertEqual(len(lst), 2) + h.switch() + self.assertEqual(len(lst), 4) + self.assertEqual(h.dead, True) + g.switch() + self.assertEqual(len(lst), 6) + self.assertEqual(g.dead, True) + + def test_two_recursive_children(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + + def g(): + lst.append(1) + g = greenlet(f) + g.switch() + lst.append(1) + g = greenlet(g) + g.switch() + self.assertEqual(len(lst), 3) + self.assertEqual(sys.getrefcount(g), 2) + + def test_threads(self): + success = [] + + def f(): + self.test_simple() + success.append(True) + ths = [threading.Thread(target=f) for i in range(10)] + for th in ths: + th.start() + for th in ths: + th.join() + self.assertEqual(len(success), len(ths)) + + def test_exception(self): + seen = [] + g1 = greenlet(fmain) + g2 = greenlet(fmain) + g1.switch(seen) + g2.switch(seen) + g2.parent = g1 + self.assertEqual(seen, []) + self.assertRaises(SomeError, g2.switch) + self.assertEqual(seen, [SomeError]) + g2.switch() + self.assertEqual(seen, [SomeError]) + + def test_send_exception(self): + seen = [] + g1 = greenlet(fmain) + g1.switch(seen) + self.assertRaises(KeyError, send_exception, g1, KeyError) + self.assertEqual(seen, [KeyError]) + + def test_dealloc(self): + seen = [] + g1 = greenlet(fmain) + g2 = greenlet(fmain) + g1.switch(seen) + g2.switch(seen) + self.assertEqual(seen, []) + del g1 + gc.collect() + self.assertEqual(seen, [greenlet.GreenletExit]) + del g2 + gc.collect() + self.assertEqual(seen, [greenlet.GreenletExit, greenlet.GreenletExit]) + + def test_dealloc_other_thread(self): + seen = [] + someref = [] + lock = threading.Lock() + lock.acquire() + lock2 = threading.Lock() + lock2.acquire() + + def f(): + g1 = greenlet(fmain) + g1.switch(seen) + someref.append(g1) + del g1 + gc.collect() + lock.release() + lock2.acquire() + greenlet() # trigger release + lock.release() + lock2.acquire() + t = threading.Thread(target=f) + t.start() + lock.acquire() + self.assertEqual(seen, []) + self.assertEqual(len(someref), 1) + del someref[:] + gc.collect() + # g1 is not released immediately because it's from another thread + self.assertEqual(seen, []) + lock2.release() + lock.acquire() + self.assertEqual(seen, [greenlet.GreenletExit]) + lock2.release() + t.join() + + def test_frame(self): + def f1(): + f = sys._getframe(0) # pylint:disable=protected-access + self.assertEqual(f.f_back, None) + greenlet.getcurrent().parent.switch(f) + return "meaning of life" + g = greenlet(f1) + frame = g.switch() + self.assertTrue(frame is g.gr_frame) + self.assertTrue(g) + + from_g = g.switch() + self.assertFalse(g) + self.assertEqual(from_g, 'meaning of life') + 
self.assertEqual(g.gr_frame, None) + + def test_thread_bug(self): + def runner(x): + g = greenlet(lambda: time.sleep(x)) + g.switch() + t1 = threading.Thread(target=runner, args=(0.2,)) + t2 = threading.Thread(target=runner, args=(0.3,)) + t1.start() + t2.start() + t1.join() + t2.join() + + def test_switch_kwargs(self): + def run(a, b): + self.assertEqual(a, 4) + self.assertEqual(b, 2) + return 42 + x = greenlet(run).switch(a=4, b=2) + self.assertEqual(x, 42) + + def test_switch_kwargs_to_parent(self): + def run(x): + greenlet.getcurrent().parent.switch(x=x) + greenlet.getcurrent().parent.switch(2, x=3) + return x, x ** 2 + g = greenlet(run) + self.assertEqual({'x': 3}, g.switch(3)) + self.assertEqual(((2,), {'x': 3}), g.switch()) + self.assertEqual((3, 9), g.switch()) + + def test_switch_to_another_thread(self): + data = {} + error = None + created_event = threading.Event() + done_event = threading.Event() + + def run(): + data['g'] = greenlet(lambda: None) + created_event.set() + done_event.wait() + thread = threading.Thread(target=run) + thread.start() + created_event.wait() + try: + data['g'].switch() + except greenlet.error: + error = sys.exc_info()[1] + self.assertIsNotNone(error, "greenlet.error was not raised!") + done_event.set() + thread.join() + + def test_exc_state(self): + def f(): + try: + raise ValueError('fun') + except: # pylint:disable=bare-except + exc_info = sys.exc_info() + greenlet(h).switch() + self.assertEqual(exc_info, sys.exc_info()) + + def h(): + self.assertEqual(sys.exc_info(), (None, None, None)) + + greenlet(f).switch() + + def test_instance_dict(self): + def f(): + greenlet.getcurrent().test = 42 + def deldict(g): + del g.__dict__ + def setdict(g, value): + g.__dict__ = value + g = greenlet(f) + self.assertEqual(g.__dict__, {}) + g.switch() + self.assertEqual(g.test, 42) + self.assertEqual(g.__dict__, {'test': 42}) + g.__dict__ = g.__dict__ + self.assertEqual(g.__dict__, {'test': 42}) + self.assertRaises(TypeError, deldict, g) + self.assertRaises(TypeError, setdict, g, 42) + + def test_threaded_reparent(self): + data = {} + created_event = threading.Event() + done_event = threading.Event() + + def run(): + data['g'] = greenlet(lambda: None) + created_event.set() + done_event.wait() + + def blank(): + greenlet.getcurrent().parent.switch() + + def setparent(g, value): + g.parent = value + + thread = threading.Thread(target=run) + thread.start() + created_event.wait() + g = greenlet(blank) + g.switch() + self.assertRaises(ValueError, setparent, g, data['g']) + done_event.set() + thread.join() + + def test_deepcopy(self): + import copy + self.assertRaises(TypeError, copy.copy, greenlet()) + self.assertRaises(TypeError, copy.deepcopy, greenlet()) + + def test_parent_restored_on_kill(self): + hub = greenlet(lambda: None) + main = greenlet.getcurrent() + result = [] + def worker(): + try: + # Wait to be killed + main.switch() + except greenlet.GreenletExit: + # Resurrect and switch to parent + result.append(greenlet.getcurrent().parent) + result.append(greenlet.getcurrent()) + hub.switch() + g = greenlet(worker, parent=hub) + g.switch() + del g + self.assertTrue(result) + self.assertEqual(result[0], main) + self.assertEqual(result[1].parent, hub) + + def test_parent_return_failure(self): + # No run causes AttributeError on switch + g1 = greenlet() + # Greenlet that implicitly switches to parent + g2 = greenlet(lambda: None, parent=g1) + # AttributeError should propagate to us, no fatal errors + self.assertRaises(AttributeError, g2.switch) + + def 
test_throw_exception_not_lost(self): + class mygreenlet(greenlet): + def __getattribute__(self, name): + try: + raise Exception() + except: # pylint:disable=bare-except + pass + return greenlet.__getattribute__(self, name) + g = mygreenlet(lambda: None) + self.assertRaises(SomeError, g.throw, SomeError()) + + def test_throw_doesnt_crash(self): + result = [] + def worker(): + greenlet.getcurrent().parent.switch() + def creator(): + g = greenlet(worker) + g.switch() + result.append(g) + t = threading.Thread(target=creator) + t.start() + t.join() + self.assertRaises(greenlet.error, result[0].throw, SomeError()) + + def test_recursive_startup(self): + class convoluted(greenlet): + def __init__(self): + greenlet.__init__(self) + self.count = 0 + def __getattribute__(self, name): + if name == 'run' and self.count == 0: + self.count = 1 + self.switch(43) + return greenlet.__getattribute__(self, name) + def run(self, value): + while True: + self.parent.switch(value) + g = convoluted() + self.assertEqual(g.switch(42), 43) + + def test_unexpected_reparenting(self): + another = [] + def worker(): + g = greenlet(lambda: None) + another.append(g) + g.switch() + t = threading.Thread(target=worker) + t.start() + t.join() + class convoluted(greenlet): + def __getattribute__(self, name): + if name == 'run': + self.parent = another[0] # pylint:disable=attribute-defined-outside-init + return greenlet.__getattribute__(self, name) + g = convoluted(lambda: None) + self.assertRaises(greenlet.error, g.switch) + + def test_threaded_updatecurrent(self): + # released when main thread should execute + lock1 = threading.Lock() + lock1.acquire() + # released when another thread should execute + lock2 = threading.Lock() + lock2.acquire() + class finalized(object): + def __del__(self): + # happens while in green_updatecurrent() in main greenlet + # should be very careful not to accidentally call it again + # at the same time we must make sure another thread executes + lock2.release() + lock1.acquire() + # now ts_current belongs to another thread + def deallocator(): + greenlet.getcurrent().parent.switch() + def fthread(): + lock2.acquire() + greenlet.getcurrent() + del g[0] + lock1.release() + lock2.acquire() + greenlet.getcurrent() + lock1.release() + main = greenlet.getcurrent() + g = [greenlet(deallocator)] + g[0].bomb = finalized() + g[0].switch() + t = threading.Thread(target=fthread) + t.start() + # let another thread grab ts_current and deallocate g[0] + lock2.release() + lock1.acquire() + # this is the corner stone + # getcurrent() will notice that ts_current belongs to another thread + # and start the update process, which would notice that g[0] should + # be deallocated, and that will execute an object's finalizer. Now, + # that object will let another thread run so it can grab ts_current + # again, which would likely crash the interpreter if there's no + # check for this case at the end of green_updatecurrent(). This test + # passes if getcurrent() returns correct result, but it's likely + # to randomly crash if it's not anyway. 
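For orientation, a small sketch (not part of this commit) of the invariant this test protects: every OS thread owns a distinct implicit main greenlet, and getcurrent() must keep answering correctly even while dead greenlets from other threads are being cleaned up.

import threading
import greenlet

mains = []

def record():
    mains.append(greenlet.getcurrent())   # the worker thread's main greenlet

t = threading.Thread(target=record)
t.start()
t.join()
print(mains[0] is greenlet.getcurrent())  # False: one main greenlet per thread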
+ self.assertEqual(greenlet.getcurrent(), main) + # wait for another thread to complete, just in case + t.join() + + def test_dealloc_switch_args_not_lost(self): + seen = [] + def worker(): + # wait for the value + value = greenlet.getcurrent().parent.switch() + # delete all references to ourself + del worker[0] + initiator.parent = greenlet.getcurrent().parent + # switch to main with the value, but because + # ts_current is the last reference to us we + # return immediately + try: + greenlet.getcurrent().parent.switch(value) + finally: + seen.append(greenlet.getcurrent()) + def initiator(): + return 42 # implicitly falls thru to parent + worker = [greenlet(worker)] + worker[0].switch() # prime worker + initiator = greenlet(initiator, worker[0]) + value = initiator.switch() + self.assertTrue(seen) + self.assertEqual(value, 42) + + + + def test_tuple_subclass(self): + if sys.version_info[0] > 2: + # There's no apply in Python 3.x + def _apply(func, a, k): + func(*a, **k) + else: + _apply = apply # pylint:disable=undefined-variable + + class mytuple(tuple): + def __len__(self): + greenlet.getcurrent().switch() + return tuple.__len__(self) + args = mytuple() + kwargs = dict(a=42) + def switchapply(): + _apply(greenlet.getcurrent().parent.switch, args, kwargs) + g = greenlet(switchapply) + self.assertEqual(g.switch(), kwargs) + + def test_abstract_subclasses(self): + AbstractSubclass = ABCMeta( + 'AbstractSubclass', + (greenlet,), + {'run': abstractmethod(lambda self: None)}) + + class BadSubclass(AbstractSubclass): + pass + + class GoodSubclass(AbstractSubclass): + def run(self): + pass + + GoodSubclass() # should not raise + self.assertRaises(TypeError, BadSubclass) + + def test_implicit_parent_with_threads(self): + if not gc.isenabled(): + return # cannot test with disabled gc + N = gc.get_threshold()[0] + if N < 50: + return # cannot test with such a small N + def attempt(): + lock1 = threading.Lock() + lock1.acquire() + lock2 = threading.Lock() + lock2.acquire() + recycled = [False] + def another_thread(): + lock1.acquire() # wait for gc + greenlet.getcurrent() # update ts_current + lock2.release() # release gc + t = threading.Thread(target=another_thread) + t.start() + class gc_callback(object): + def __del__(self): + lock1.release() + lock2.acquire() + recycled[0] = True + class garbage(object): + def __init__(self): + self.cycle = self + self.callback = gc_callback() + l = [] + x = range(N*2) + current = greenlet.getcurrent() + g = garbage() + for _ in x: + g = None # lose reference to garbage + if recycled[0]: + # gc callback called prematurely + t.join() + return False + last = greenlet() + if recycled[0]: + break # yes! gc called in green_new + l.append(last) # increase allocation counter + else: + # gc callback not called when expected + gc.collect() + if recycled[0]: + t.join() + return False + self.assertEqual(last.parent, current) + for g in l: + self.assertEqual(g.parent, current) + return True + for _ in range(5): + if attempt(): + break + + def test_issue_245_reference_counting_subclass_no_threads(self): + # https://github.com/python-greenlet/greenlet/issues/245 + # Before the fix, this crashed pretty reliably on + # Python 3.10, at least on macOS; but much less reliably on other + # interpreters (memory layout must have changed). + # The threaded test crashed more reliably on more interpreters. 
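Since the next test counts references to a greenlet subclass, here is a minimal sketch (not part of this commit; the class name is hypothetical) of the subclassing pattern involved: run() is looked up on the instance when the greenlet first starts, and falling off the end of run() switches back to the parent.

import greenlet

class Doubler(greenlet.greenlet):   # hypothetical subclass for illustration
    def run(self, x):
        return x * 2                # the return value is delivered to the parent

g = Doubler()
print(g.switch(21))                 # 42
print(g.dead)                       # True: run() finished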
+ from greenlet import getcurrent + from greenlet import GreenletExit + + class Greenlet(greenlet): + pass + + initial_refs = sys.getrefcount(Greenlet) + # This has to be an instance variable because + # Python 2 raises a SyntaxError if we delete a local + # variable referenced in an inner scope. + self.glets = [] # pylint:disable=attribute-defined-outside-init + + def greenlet_main(): + try: + getcurrent().parent.switch() + except GreenletExit: + self.glets.append(getcurrent()) + + # Before the + for _ in range(10): + Greenlet(greenlet_main).switch() + + del self.glets + self.assertEqual(sys.getrefcount(Greenlet), initial_refs) + + def test_issue_245_reference_counting_subclass_threads(self): + # https://github.com/python-greenlet/greenlet/issues/245 + from threading import Thread + from threading import Event + + from greenlet import getcurrent + + class MyGreenlet(greenlet): + pass + + glets = [] + ref_cleared = Event() + + def greenlet_main(): + getcurrent().parent.switch() + + def thread_main(greenlet_running_event): + mine = MyGreenlet(greenlet_main) + glets.append(mine) + # The greenlets being deleted must be active + mine.switch() + # Don't keep any reference to it in this thread + del mine + # Let main know we published our greenlet. + greenlet_running_event.set() + # Wait for main to let us know the references are + # gone and the greenlet objects no longer reachable + ref_cleared.wait() + # The creating thread must call getcurrent() (or a few other + # greenlet APIs) because that's when the thread-local list of dead + # greenlets gets cleared. + getcurrent() + + # We start with 3 references to the subclass: + # - This module + # - Its __mro__ + # - The __subclassess__ attribute of greenlet + # - (If we call gc.get_referents(), we find four entries, including + # some other tuple ``(greenlet)`` that I'm not sure about but must be part + # of the machinery.) + # + # On Python 3.10 it's often enough to just run 3 threads; on Python 2.7, + # more threads are needed, and the results are still + # non-deterministic. Presumably the memory layouts are different + initial_refs = sys.getrefcount(MyGreenlet) + thread_ready_events = [] + for _ in range( + initial_refs + 45 + ): + event = Event() + thread = Thread(target=thread_main, args=(event,)) + thread_ready_events.append(event) + thread.start() + + + for done_event in thread_ready_events: + done_event.wait() + + + del glets[:] + ref_cleared.set() + # Let any other thread run; it will crash the interpreter + # if not fixed (or silently corrupt memory and we possibly crash + # later). 
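Related background for the threaded tests in this region, sketched with hypothetical names (not part of this commit): a greenlet is pinned to the thread that created it, and attempting to switch to it from any other thread raises greenlet.error.

import threading
import greenlet

holder = []

def maker():
    holder.append(greenlet.greenlet(lambda: None))

t = threading.Thread(target=maker)
t.start()
t.join()

try:
    holder[0].switch()         # wrong thread: switching is refused
except greenlet.error as e:
    print('refused:', e)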
+ time.sleep(1) + self.assertEqual(sys.getrefcount(MyGreenlet), initial_refs) + + +class TestRepr(unittest.TestCase): + + def assertEndsWith(self, got, suffix): + self.assertTrue(got.endswith(suffix), (got, suffix)) + + def test_main_while_running(self): + r = repr(greenlet.getcurrent()) + self.assertEndsWith(r, " current active started main>") + + def test_main_in_background(self): + main = greenlet.getcurrent() + def run(): + return repr(main) + + g = greenlet(run) + r = g.switch() + self.assertEndsWith(r, ' suspended active started main>') + + def test_initial(self): + r = repr(greenlet()) + self.assertEndsWith(r, ' pending>') + + def test_main_from_other_thread(self): + main = greenlet.getcurrent() + + class T(threading.Thread): + original_main = thread_main = None + main_glet = None + def run(self): + self.original_main = repr(main) + self.main_glet = greenlet.getcurrent() + self.thread_main = repr(self.main_glet) + + t = T() + t.start() + t.join(10) + + self.assertEndsWith(t.original_main, ' suspended active started main>') + self.assertEndsWith(t.thread_main, ' current active started main>') + + r = repr(t.main_glet) + # main greenlets, even from dead threads, never really appear dead + # TODO: Can we find a better way to differentiate that? + assert not t.main_glet.dead + self.assertEndsWith(r, ' suspended active started main>') + + def test_dead(self): + g = greenlet(lambda: None) + g.switch() + self.assertEndsWith(repr(g), ' dead>') + self.assertNotIn('suspended', repr(g)) + self.assertNotIn('started', repr(g)) + self.assertNotIn('active', repr(g)) + + def test_formatting_produces_native_str(self): + # https://github.com/python-greenlet/greenlet/issues/218 + # %s formatting on Python 2 was producing unicode, not str. + + g_dead = greenlet(lambda: None) + g_not_started = greenlet(lambda: None) + g_cur = greenlet.getcurrent() + + for g in g_dead, g_not_started, g_cur: + + self.assertIsInstance( + '%s' % (g,), + str + ) + self.assertIsInstance( + '%r' % (g,), + str, + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_leaks.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_leaks.py new file mode 100644 index 0000000..2b02bfd --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_leaks.py @@ -0,0 +1,178 @@ +import unittest +import sys +import gc + +import time +import weakref +import threading + +import greenlet + +class TestLeaks(unittest.TestCase): + + def test_arg_refs(self): + args = ('a', 'b', 'c') + refcount_before = sys.getrefcount(args) + # pylint:disable=unnecessary-lambda + g = greenlet.greenlet( + lambda *args: greenlet.getcurrent().parent.switch(*args)) + for _ in range(100): + g.switch(*args) + self.assertEqual(sys.getrefcount(args), refcount_before) + + def test_kwarg_refs(self): + kwargs = {} + # pylint:disable=unnecessary-lambda + g = greenlet.greenlet( + lambda **kwargs: greenlet.getcurrent().parent.switch(**kwargs)) + for _ in range(100): + g.switch(**kwargs) + self.assertEqual(sys.getrefcount(kwargs), 2) + + assert greenlet.GREENLET_USE_GC # Option to disable this was removed in 1.0 + + def recycle_threads(self): + # By introducing a thread that does sleep we allow other threads, + # that have triggered their __block condition, but did not have a + # chance to deallocate their thread state yet, to finally do so. 
+ # The way it works is by requiring a GIL switch (different thread), + # which does a GIL release (sleep), which might do a GIL switch + # to finished threads and allow them to clean up. + def worker(): + time.sleep(0.001) + t = threading.Thread(target=worker) + t.start() + time.sleep(0.001) + t.join() + + def test_threaded_leak(self): + gg = [] + def worker(): + # only main greenlet present + gg.append(weakref.ref(greenlet.getcurrent())) + for _ in range(2): + t = threading.Thread(target=worker) + t.start() + t.join() + del t + greenlet.getcurrent() # update ts_current + self.recycle_threads() + greenlet.getcurrent() # update ts_current + gc.collect() + greenlet.getcurrent() # update ts_current + for g in gg: + self.assertIsNone(g()) + + def test_threaded_adv_leak(self): + gg = [] + def worker(): + # main and additional *finished* greenlets + ll = greenlet.getcurrent().ll = [] + def additional(): + ll.append(greenlet.getcurrent()) + for _ in range(2): + greenlet.greenlet(additional).switch() + gg.append(weakref.ref(greenlet.getcurrent())) + for _ in range(2): + t = threading.Thread(target=worker) + t.start() + t.join() + del t + greenlet.getcurrent() # update ts_current + self.recycle_threads() + greenlet.getcurrent() # update ts_current + gc.collect() + greenlet.getcurrent() # update ts_current + for g in gg: + self.assertIsNone(g()) + + def test_issue251_killing_cross_thread_leaks_list(self, manually_collect_background=True): + # See https://github.com/python-greenlet/greenlet/issues/251 + # Killing a greenlet (probably not the main one) + # in one thread from another thread would + # result in leaking a list (the ts_delkey list). + + # For the test to be valid, even empty lists have to be tracked by the + # GC + assert gc.is_tracked([]) + + def count_objects(kind=list): + # pylint:disable=unidiomatic-typecheck + # Collect the garbage. + for _ in range(3): + gc.collect() + gc.collect() + return sum( + 1 + for x in gc.get_objects() + if type(x) is kind + ) + + # XXX: The main greenlet of a dead thread is only released + # when one of the proper greenlet APIs is used from a different + # running thread. See #252 (https://github.com/python-greenlet/greenlet/issues/252) + greenlet.getcurrent() + greenlets_before = count_objects(greenlet.greenlet) + + background_glet_running = threading.Event() + background_glet_killed = threading.Event() + background_greenlets = [] + def background_greenlet(): + # Throw control back to the main greenlet. + greenlet.getcurrent().parent.switch() + + def background_thread(): + glet = greenlet.greenlet(background_greenlet) + background_greenlets.append(glet) + glet.switch() # Be sure it's active. + # Control is ours again. + del glet # Delete one reference from the thread it runs in. + background_glet_running.set() + background_glet_killed.wait() + # To trigger the background collection of the dead + # greenlet, thus clearing out the contents of the list, we + # need to run some APIs. See issue 252. + if manually_collect_background: + greenlet.getcurrent() + + + t = threading.Thread(target=background_thread) + t.start() + background_glet_running.wait() + + lists_before = count_objects() + + assert len(background_greenlets) == 1 + self.assertFalse(background_greenlets[0].dead) + # Delete the last reference to the background greenlet + # from a different thread. This puts it in the background thread's + # ts_delkey list. + del background_greenlets[:] + background_glet_killed.set() + + # Now wait for the background thread to die. 
+ t.join(10) + del t + + # Free the background main greenlet by forcing greenlet to notice a difference. + greenlet.getcurrent() + greenlets_after = count_objects(greenlet.greenlet) + + lists_after = count_objects() + # On 2.7, we observe that lists_after is smaller than + # lists_before. No idea what lists got cleaned up. All the + # Python 3 versions match exactly. + self.assertLessEqual(lists_after, lists_before) + + self.assertEqual(greenlets_before, greenlets_after) + + @unittest.expectedFailure + def test_issue251_issue252_need_to_collect_in_background(self): + # This still fails because the leak of the list + # still exists when we don't call a greenlet API before exiting the + # thread. The proximate cause is that neither of the two greenlets + # from the background thread are actually being destroyed, even though + # the GC is in fact visiting both objects. + # It's not clear where that leak is? For some reason the thread-local dict + # holding it isn't being cleaned up. + self.test_issue251_killing_cross_thread_leaks_list(manually_collect_background=False) diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_stack_saved.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_stack_saved.py new file mode 100644 index 0000000..6c7353b --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_stack_saved.py @@ -0,0 +1,19 @@ +import greenlet +import unittest + + +class Test(unittest.TestCase): + + def test_stack_saved(self): + main = greenlet.getcurrent() + self.assertEqual(main._stack_saved, 0) + + def func(): + main.switch(main._stack_saved) + + g = greenlet.greenlet(func) + x = g.switch() + assert x > 0, x + assert g._stack_saved > 0, g._stack_saved + g.switch() + assert g._stack_saved == 0, g._stack_saved diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_throw.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_throw.py new file mode 100644 index 0000000..a2014a9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_throw.py @@ -0,0 +1,100 @@ +import sys +import unittest + +from greenlet import greenlet + + +def switch(*args): + return greenlet.getcurrent().parent.switch(*args) + + +class ThrowTests(unittest.TestCase): + def test_class(self): + def f(): + try: + switch("ok") + except RuntimeError: + switch("ok") + return + switch("fail") + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError) + self.assertEqual(res, "ok") + + def test_val(self): + def f(): + try: + switch("ok") + except RuntimeError: + val = sys.exc_info()[1] + if str(val) == "ciao": + switch("ok") + return + switch("fail") + + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError("ciao")) + self.assertEqual(res, "ok") + + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError, "ciao") + self.assertEqual(res, "ok") + + def test_kill(self): + def f(): + switch("ok") + switch("fail") + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw() + self.assertTrue(isinstance(res, greenlet.GreenletExit)) + self.assertTrue(g.dead) + res = g.throw() # immediately eaten by the already-dead greenlet + self.assertTrue(isinstance(res, greenlet.GreenletExit)) + + def test_throw_goes_to_original_parent(self): + main = greenlet.getcurrent() + + def f1(): + try: + main.switch("f1 ready to catch") + except IndexError: + return "caught" + else: + return "normal exit" + + def f2(): + main.switch("from f2") + + g1 = 
greenlet(f1) + g2 = greenlet(f2, parent=g1) + self.assertRaises(IndexError, g2.throw, IndexError) + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + res = g1.switch() + self.assertEqual(res, "f1 ready to catch") + res = g2.throw(IndexError) + self.assertEqual(res, "caught") + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + res = g1.switch() + self.assertEqual(res, "f1 ready to catch") + res = g2.switch() + self.assertEqual(res, "from f2") + res = g2.throw(IndexError) + self.assertEqual(res, "caught") + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_tracing.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_tracing.py new file mode 100644 index 0000000..2ab4d71 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_tracing.py @@ -0,0 +1,267 @@ +import sys +import unittest +import greenlet + +class SomeError(Exception): + pass + +class GreenletTracer(object): + oldtrace = None + + def __init__(self, error_on_trace=False): + self.actions = [] + self.error_on_trace = error_on_trace + + def __call__(self, *args): + self.actions.append(args) + if self.error_on_trace: + raise SomeError + + def __enter__(self): + self.oldtrace = greenlet.settrace(self) + return self.actions + + def __exit__(self, *args): + greenlet.settrace(self.oldtrace) + + +class TestGreenletTracing(unittest.TestCase): + """ + Tests of ``greenlet.settrace()`` + """ + + def test_greenlet_tracing(self): + main = greenlet.getcurrent() + def dummy(): + pass + def dummyexc(): + raise SomeError() + + with GreenletTracer() as actions: + g1 = greenlet.greenlet(dummy) + g1.switch() + g2 = greenlet.greenlet(dummyexc) + self.assertRaises(SomeError, g2.switch) + + self.assertEqual(actions, [ + ('switch', (main, g1)), + ('switch', (g1, main)), + ('switch', (main, g2)), + ('throw', (g2, main)), + ]) + + def test_exception_disables_tracing(self): + main = greenlet.getcurrent() + def dummy(): + main.switch() + g = greenlet.greenlet(dummy) + g.switch() + with GreenletTracer(error_on_trace=True) as actions: + self.assertRaises(SomeError, g.switch) + self.assertEqual(greenlet.gettrace(), None) + + self.assertEqual(actions, [ + ('switch', (main, g)), + ]) + + +class PythonTracer(object): + oldtrace = None + + def __init__(self): + self.actions = [] + + def __call__(self, frame, event, arg): + # Record the co_name so we have an idea what function we're in. + self.actions.append((event, frame.f_code.co_name)) + + def __enter__(self): + self.oldtrace = sys.setprofile(self) + return self.actions + + def __exit__(self, *args): + sys.setprofile(self.oldtrace) + +def tpt_callback(): + return 42 + +class TestPythonTracing(unittest.TestCase): + """ + Tests of the interaction of ``sys.settrace()`` + with greenlet facilities. + + NOTE: Most of this is probably CPython specific. 
+ """ + + maxDiff = None + + def test_trace_events_trivial(self): + with PythonTracer() as actions: + tpt_callback() + # If we use the sys.settrace instead of setprofile, we get + # this: + + # self.assertEqual(actions, [ + # ('call', 'tpt_callback'), + # ('call', '__exit__'), + # ]) + + self.assertEqual(actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + def _trace_switch(self, glet): + with PythonTracer() as actions: + glet.switch() + return actions + + def _check_trace_events_func_already_set(self, glet): + actions = self._trace_switch(glet) + self.assertEqual(actions, [ + ('return', '__enter__'), + ('c_call', '_trace_switch'), + ('call', 'run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('return', 'run'), + ('c_return', '_trace_switch'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + def test_trace_events_into_greenlet_func_already_set(self): + def run(): + return tpt_callback() + + self._check_trace_events_func_already_set(greenlet.greenlet(run)) + + def test_trace_events_into_greenlet_subclass_already_set(self): + class X(greenlet.greenlet): + def run(self): + return tpt_callback() + self._check_trace_events_func_already_set(X()) + + def _check_trace_events_from_greenlet_sets_profiler(self, g, tracer): + g.switch() + tpt_callback() + tracer.__exit__() + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('return', 'run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + + def test_trace_events_from_greenlet_func_sets_profiler(self): + tracer = PythonTracer() + def run(): + tracer.__enter__() + return tpt_callback() + + self._check_trace_events_from_greenlet_sets_profiler(greenlet.greenlet(run), + tracer) + + def test_trace_events_from_greenlet_subclass_sets_profiler(self): + tracer = PythonTracer() + class X(greenlet.greenlet): + def run(self): + tracer.__enter__() + return tpt_callback() + + self._check_trace_events_from_greenlet_sets_profiler(X(), tracer) + + + def test_trace_events_multiple_greenlets_switching(self): + tracer = PythonTracer() + + g1 = None + g2 = None + + def g1_run(): + tracer.__enter__() + tpt_callback() + g2.switch() + tpt_callback() + return 42 + + def g2_run(): + tpt_callback() + tracer.__exit__() + tpt_callback() + g1.switch() + + g1 = greenlet.greenlet(g1_run) + g2 = greenlet.greenlet(g2_run) + + x = g1.switch() + self.assertEqual(x, 42) + tpt_callback() # ensure not in the trace + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('c_call', 'g1_run'), + ('call', 'g2_run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + def test_trace_events_multiple_greenlets_switching_siblings(self): + # Like the first version, but get both greenlets running first + # as "siblings" and then establish the tracing. + tracer = PythonTracer() + + g1 = None + g2 = None + + def g1_run(): + greenlet.getcurrent().parent.switch() + tracer.__enter__() + tpt_callback() + g2.switch() + tpt_callback() + return 42 + + def g2_run(): + greenlet.getcurrent().parent.switch() + + tpt_callback() + tracer.__exit__() + tpt_callback() + g1.switch() + + g1 = greenlet.greenlet(g1_run) + g2 = greenlet.greenlet(g2_run) + + # Start g1 + g1.switch() + # And it immediately returns control to us. 
+ # Start g2 + g2.switch() + # Which also returns. Now kick off the real part of the + # test. + x = g1.switch() + self.assertEqual(x, 42) + + tpt_callback() # ensure not in the trace + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('c_call', 'g1_run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_version.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_version.py new file mode 100644 index 0000000..0c9a497 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_version.py @@ -0,0 +1,39 @@ +#! /usr/bin/env python +from __future__ import absolute_import +from __future__ import print_function + +import sys +import os +import unittest + +import greenlet + +class VersionTests(unittest.TestCase): + def test_version(self): + def find_dominating_file(name): + if os.path.exists(name): + return name + + tried = [] + here = os.path.abspath(os.path.dirname(__file__)) + for i in range(10): + up = ['..'] * i + path = [here] + up + [name] + fname = os.path.join(*path) + fname = os.path.abspath(fname) + tried.append(fname) + if os.path.exists(fname): + return fname + raise AssertionError("Could not find file " + name + "; checked " + str(tried)) + + try: + setup_py = find_dominating_file('setup.py') + except AssertionError as e: + raise unittest.SkipTest("Unable to find setup.py; must be out of tree. " + str(e)) + + + invoke_setup = "%s %s --version" % (sys.executable, setup_py) + with os.popen(invoke_setup) as f: + sversion = f.read().strip() + + self.assertEqual(sversion, greenlet.__version__) diff --git a/venv/lib/python3.10/site-packages/greenlet/tests/test_weakref.py b/venv/lib/python3.10/site-packages/greenlet/tests/test_weakref.py new file mode 100644 index 0000000..6a2ff06 --- /dev/null +++ b/venv/lib/python3.10/site-packages/greenlet/tests/test_weakref.py @@ -0,0 +1,34 @@ +import gc +import greenlet +import weakref +import unittest + + +class WeakRefTests(unittest.TestCase): + def test_dead_weakref(self): + def _dead_greenlet(): + g = greenlet.greenlet(lambda: None) + g.switch() + return g + o = weakref.ref(_dead_greenlet()) + gc.collect() + self.assertEqual(o(), None) + + def test_inactive_weakref(self): + o = weakref.ref(greenlet.greenlet()) + gc.collect() + self.assertEqual(o(), None) + + def test_dealloc_weakref(self): + seen = [] + def worker(): + try: + greenlet.getcurrent().parent.switch() + finally: + seen.append(g()) + g = greenlet.greenlet(worker) + g.switch() + g2 = greenlet.greenlet(lambda: None, g) + g = weakref.ref(g2) + g2 = None + self.assertEqual(seen, [None]) diff --git a/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/LICENSE.txt b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..8f080ea --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J.
Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/METADATA new file mode 100644 index 0000000..8c77b40 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/METADATA @@ -0,0 +1,197 @@ +Metadata-Version: 2.1 +Name: h11 +Version: 0.13.0 +Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 +Home-page: https://github.com/python-hyper/h11 +Author: Nathaniel J. Smith +Author-email: njs@pobox.com +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: System :: Networking +Requires-Python: >=3.6 +License-File: LICENSE.txt +Requires-Dist: dataclasses ; python_version < "3.7" +Requires-Dist: typing-extensions ; python_version < "3.8" + +h11 +=== + +.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master + :target: https://travis-ci.org/python-hyper/h11 + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-hyper/h11 + :alt: Test coverage + +.. image:: https://readthedocs.org/projects/h11/badge/?version=latest + :target: http://h11.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +This is a little HTTP/1.1 library written from scratch in Python, +heavily inspired by `hyper-h2 `_. + +It's a "bring-your-own-I/O" library; h11 contains no IO code +whatsoever. This means you can hook h11 up to your favorite network +API, and that could be anything you want: synchronous, threaded, +asynchronous, or your own implementation of `RFC 6214 +`_ -- h11 won't judge you. +(Compare this to the current state of the art, where every time a `new +network API `_ comes along then someone +gets to start over reimplementing the entire HTTP protocol from +scratch.) 
Cory Benfield made an `excellent blog post describing the +benefits of this approach +`_, or if you like video +then here's his `PyCon 2016 talk on the same theme +`_. + +This also means that h11 is not immediately useful out of the box: +it's a toolkit for building programs that speak HTTP, not something +that could directly replace ``requests`` or ``twisted.web`` or +whatever. But h11 makes it much easier to implement something like +``requests`` or ``twisted.web``. + +At a high level, working with h11 goes like this: + +1) First, create an ``h11.Connection`` object to track the state of a + single HTTP/1.1 connection. + +2) When you read data off the network, pass it to + ``conn.receive_data(...)``; then call ``conn.next_event()`` repeatedly + to pull out objects representing high-level HTTP "events". + +3) When you want to send a high-level HTTP event, create the + corresponding "event" object and pass it to ``conn.send(...)``; + this will give you back some bytes that you can then push out + through the network. + +For example, a client might instantiate and then send a +``h11.Request`` object, then zero or more ``h11.Data`` objects for the +request body (e.g., if this is a POST), and then a +``h11.EndOfMessage`` to indicate the end of the message. The +server would then send back a ``h11.Response``, some ``h11.Data``, and +its own ``h11.EndOfMessage``. If either side violates the protocol, +you'll get a ``h11.ProtocolError`` exception. + +h11 is suitable for implementing both servers and clients, and has a +pleasantly symmetric API: the events you send as a client are exactly +the ones that you receive as a server and vice-versa. + +`Here's an example of a tiny HTTP client +`_ + +It also has `a fine manual `_. + +FAQ +--- + +*Whyyyyy?* + +I wanted to play with HTTP in `Curio +`__ and `Trio +`__, which at the time didn't have any +HTTP libraries. So I thought, no big deal, Python has, like, a dozen +different implementations of HTTP, surely I can find one that's +reusable. I didn't find one, but I did find Cory's call-to-arms +blog-post. So I figured, well, fine, if I have to implement HTTP from +scratch, at least I can make sure no-one *else* has to ever again. + +*Should I use it?* + +Maybe. You should be aware that it's a very young project. But, it's +feature complete and has an exhaustive test-suite and complete docs, +so the next step is for people to try using it and see how it goes +:-). If you do then please let us know -- if nothing else we'll want +to talk to you before making any incompatible changes! + +*What are the features/limitations?* + +Roughly speaking, it's trying to be a robust, complete, and non-hacky +implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: +HTTP/1.1 Message Syntax and Routing +`_. That is, it mostly focuses on +implementing HTTP at the level of taking bytes on and off the wire, +and the headers related to that, and tries to be anal about spec +conformance. It doesn't know about higher-level concerns like URL +routing, conditional GETs, cross-origin cookie policies, or content +negotiation. But it does know how to take care of framing, +cross-version differences in keep-alive handling, and the "obsolete +line folding" rule, so you can focus your energies on the hard / +interesting parts for your application, and it tries to support the +full specification in the sense that any useful HTTP/1.1 conformant +application should be able to use h11. + +It's pure Python, and has no dependencies outside of the standard +library.
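+
+To make the high-level flow described above concrete, here is a
+rough, untested sketch of a client doing a single GET; the host,
+port, and header values are only placeholders:
+
+.. code-block:: python
+
+   import socket
+
+   import h11
+
+   conn = h11.Connection(our_role=h11.CLIENT)
+   sock = socket.create_connection(("example.com", 80))
+
+   # A bare GET has no body, so EndOfMessage follows the Request
+   # immediately; conn.send() turns each event into bytes to write.
+   sock.sendall(conn.send(h11.Request(
+       method="GET",
+       target="/",
+       headers=[("Host", "example.com"), ("Connection", "close")],
+   )))
+   sock.sendall(conn.send(h11.EndOfMessage()))
+
+   # Feed received bytes in, pull events out, until the response ends.
+   while True:
+       event = conn.next_event()
+       if event is h11.NEED_DATA:
+           conn.receive_data(sock.recv(4096))
+       elif isinstance(event, h11.EndOfMessage):
+           break
+       else:
+           print(event)  # a Response, then zero or more Data events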
+ +It has a test suite with 100.0% coverage for both statements and +branches. + +Currently it supports Python 3 (testing on 3.6-3.9) and PyPy 3. +The last Python 2-compatible version was h11 0.11.x. +(Originally it had a Cython wrapper for `http-parser +`_ and a beautiful nested state +machine implemented with ``yield from`` to postprocess the output. But +I had to take these out -- the new *parser* needs fewer lines-of-code +than the old *parser wrapper*, is written in pure Python, uses no +exotic language syntax, and has more features. It's sad, really; that +old state machine was really slick. I just need a few sentences here +to mourn that.) + +I don't know how fast it is. I haven't benchmarked or profiled it yet, +so it's probably got a few pointless hot spots, and I've been trying +to err on the side of simplicity and robustness instead of +micro-optimization. But at the architectural level I tried hard to +avoid fundamentally bad decisions, e.g., I believe that all the +parsing algorithms remain linear-time even in the face of pathological +input like slowloris, and there are no byte-by-byte loops. (I also +believe that it maintains bounded memory usage in the face of +arbitrary/pathological input.) + +The whole library is ~800 lines-of-code. You can read and understand +the whole thing in less than an hour. Most of the energy invested in +this so far has been spent on trying to keep things simple by +minimizing special-cases and ad hoc state manipulation; even though it +is now quite small and simple, I'm still annoyed that I haven't +figured out how to make it even smaller and simpler. (Unfortunately, +HTTP does not lend itself to simplicity.) + +The API is ~feature complete and I don't expect the general outlines +to change much, but you can't judge an API's ergonomics until you +actually document and use it, so I'd expect some changes in the +details. + +*How do I try it?* + +.. code-block:: sh + + $ pip install h11 + $ git clone git@github.com:python-hyper/h11 + $ cd h11/examples + $ python basic-client.py + +and go from there. + +*License?* + +MIT + +*Code of conduct?* + +Contributors are requested to follow our `code of conduct +`_ in +all project spaces. 
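+
+*What does the server side look like?*
+
+Pleasantly similar, thanks to the symmetric API. A rough, untested
+sketch of answering a single request on an already-accepted socket;
+the response body and headers are just placeholders:
+
+.. code-block:: python
+
+   import h11
+
+   def serve_one_request(sock):
+       conn = h11.Connection(our_role=h11.SERVER)
+
+       # Read events until the request, including any body, is complete.
+       while True:
+           event = conn.next_event()
+           if event is h11.NEED_DATA:
+               conn.receive_data(sock.recv(4096))
+           elif isinstance(event, h11.EndOfMessage):
+               break
+
+       body = b"hello world\n"
+       sock.sendall(conn.send(h11.Response(
+           status_code=200,
+           headers=[("Content-Type", "text/plain"),
+                    ("Content-Length", str(len(body)))],
+       )))
+       sock.sendall(conn.send(h11.Data(data=body)))
+       sock.sendall(conn.send(h11.EndOfMessage()))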
+ + diff --git a/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/RECORD new file mode 100644 index 0000000..856e13e --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/RECORD @@ -0,0 +1,52 @@ +h11-0.13.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h11-0.13.0.dist-info/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.13.0.dist-info/METADATA,sha256=Fd9foEJycn0gUB9YsXul6neMlYnEU0MRQ8IUBsSOHxE,8245 +h11-0.13.0.dist-info/RECORD,, +h11-0.13.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +h11-0.13.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 +h11/__pycache__/__init__.cpython-310.pyc,, +h11/__pycache__/_abnf.cpython-310.pyc,, +h11/__pycache__/_connection.cpython-310.pyc,, +h11/__pycache__/_events.cpython-310.pyc,, +h11/__pycache__/_headers.cpython-310.pyc,, +h11/__pycache__/_readers.cpython-310.pyc,, +h11/__pycache__/_receivebuffer.cpython-310.pyc,, +h11/__pycache__/_state.cpython-310.pyc,, +h11/__pycache__/_util.cpython-310.pyc,, +h11/__pycache__/_version.cpython-310.pyc,, +h11/__pycache__/_writers.cpython-310.pyc,, +h11/_abnf.py,sha256=tMKqgOEkTHHp8sPd_gmU9Qowe_yXXrihct63RX2zJsg,4637 +h11/_connection.py,sha256=udHjqEO1fOcQUKa3hYIw88DMeoyG4fxiIXKjgE4DwJw,26480 +h11/_events.py,sha256=LEfuvg1AbhHaVRwxCd0I-pFn9-ezUOaoL8o2Kvy1PBA,11816 +h11/_headers.py,sha256=tRwZuFy5Wj4Yi9VVad_s7EqwCgeN6O3TIbcHd5CN_GI,10230 +h11/_readers.py,sha256=TWWoSbLVBfYGzD5dunReTd2QCxz466wjwu-4Fkzk_sQ,8370 +h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 +h11/_state.py,sha256=F8MPHIFMJV3kUPYR3YjrjqjJ1AYp_FZ38UwGr0855lE,13184 +h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 +h11/_version.py,sha256=ye-8iNs3P1TB71VRGlNQe2OxnAe-RupjozAMywAS5z8,686 +h11/_writers.py,sha256=7WBTXyJqFAUqqmLl5adGF8_7UVQdOVa2phL8s8csljI,5063 +h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 +h11/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +h11/tests/__pycache__/__init__.cpython-310.pyc,, +h11/tests/__pycache__/helpers.cpython-310.pyc,, +h11/tests/__pycache__/test_against_stdlib_http.cpython-310.pyc,, +h11/tests/__pycache__/test_connection.cpython-310.pyc,, +h11/tests/__pycache__/test_events.cpython-310.pyc,, +h11/tests/__pycache__/test_headers.cpython-310.pyc,, +h11/tests/__pycache__/test_helpers.cpython-310.pyc,, +h11/tests/__pycache__/test_io.cpython-310.pyc,, +h11/tests/__pycache__/test_receivebuffer.cpython-310.pyc,, +h11/tests/__pycache__/test_state.cpython-310.pyc,, +h11/tests/__pycache__/test_util.cpython-310.pyc,, +h11/tests/data/test-file,sha256=ZJ03Rqs98oJw29OHzJg7LlMzyGQaRAY0r3AqBeM2wVU,65 +h11/tests/helpers.py,sha256=a1EVG_p7xU4wRsa3tMPTRxuaKCmretok9sxXWvqfmQA,3355 +h11/tests/test_against_stdlib_http.py,sha256=cojCHgHXFQ8gWhNlEEwl3trmOpN-5uDukRoHnElqo3A,3995 +h11/tests/test_connection.py,sha256=ZbPLDPclKvjgjAhgk-WlCPBaf17c4XUIV2tpaW08jOI,38720 +h11/tests/test_events.py,sha256=LPVLbcV-NvPNK9fW3rraR6Bdpz1hAlsWubMtNaJ5gHg,4657 +h11/tests/test_headers.py,sha256=qd8T1Zenuz5GbD6wklSJ5G8VS7trrYgMV0jT-SMvqg8,5612 +h11/tests/test_helpers.py,sha256=kAo0CEM4LGqmyyP2ZFmhsyq3UFJqoFfAbzu3hbWreRM,794 +h11/tests/test_io.py,sha256=gXFSKpcx6n3-Ld0Y8w5kBkom1LZsCq3uHtqdotQ3S2c,16243 +h11/tests/test_receivebuffer.py,sha256=3jGbeJM36Akqg_pAhPb7XzIn2NS6RhPg-Ryg8Eu6ytk,3454 
+h11/tests/test_state.py,sha256=rqll9WqFsJPE0zSrtCn9LH659mPKsDeXZ-DwXwleuBQ,8928 +h11/tests/test_util.py,sha256=ZWdRng_P-JP-cnvmcBhazBxfyWmEKBB0NLrDy5eq3h0,2970 diff --git a/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/top_level.txt new file mode 100644 index 0000000..0d24def --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11-0.13.0.dist-info/top_level.txt @@ -0,0 +1 @@ +h11 diff --git a/venv/lib/python3.10/site-packages/h11/__init__.py b/venv/lib/python3.10/site-packages/h11/__init__.py new file mode 100644 index 0000000..989e92c --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/__init__.py @@ -0,0 +1,62 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. + +from h11._connection import Connection, NEED_DATA, PAUSED +from h11._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from h11._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from h11._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ( + "Connection", + "NEED_DATA", + "PAUSED", + "ConnectionClosed", + "Data", + "EndOfMessage", + "Event", + "InformationalResponse", + "Request", + "Response", + "CLIENT", + "CLOSED", + "DONE", + "ERROR", + "IDLE", + "MUST_CLOSE", + "SEND_BODY", + "SEND_RESPONSE", + "SERVER", + "SWITCHED_PROTOCOL", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", +) diff --git a/venv/lib/python3.10/site-packages/h11/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..9a99e2b Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/__pycache__/_abnf.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/__pycache__/_abnf.cpython-310.pyc new file mode 100644 index 0000000..b7353e5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/__pycache__/_abnf.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/__pycache__/_connection.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/__pycache__/_connection.cpython-310.pyc new file mode 100644 index 0000000..d8c17a8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/__pycache__/_connection.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/h11/__pycache__/_events.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/__pycache__/_events.cpython-310.pyc new file mode 100644 index 0000000..c5da74d Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/__pycache__/_events.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/__pycache__/_headers.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/__pycache__/_headers.cpython-310.pyc new file mode 100644 index 0000000..eaff98a Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/__pycache__/_headers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/__pycache__/_readers.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/__pycache__/_readers.cpython-310.pyc new file mode 100644 index 0000000..21cb5e9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/__pycache__/_readers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/__pycache__/_receivebuffer.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/__pycache__/_receivebuffer.cpython-310.pyc new file mode 100644 index 0000000..581696a Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/__pycache__/_receivebuffer.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/__pycache__/_state.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/__pycache__/_state.cpython-310.pyc new file mode 100644 index 0000000..c16fb51 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/__pycache__/_state.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/__pycache__/_util.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/__pycache__/_util.cpython-310.pyc new file mode 100644 index 0000000..428e33e Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/__pycache__/_util.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/__pycache__/_version.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/__pycache__/_version.cpython-310.pyc new file mode 100644 index 0000000..a5c36b9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/__pycache__/_version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/__pycache__/_writers.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/__pycache__/_writers.cpython-310.pyc new file mode 100644 index 0000000..d6c9e95 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/__pycache__/_writers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/_abnf.py b/venv/lib/python3.10/site-packages/h11/_abnf.py new file mode 100644 index 0000000..e6d49e1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/_abnf.py @@ -0,0 +1,129 @@ +# We use native strings for all the re patterns, to take advantage of string +# formatting, and then convert to bytestrings when compiling the final re +# objects. + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace +# OWS = *( SP / HTAB ) +# ; optional whitespace +OWS = r"[ \t]*" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." 
/ "^" / "_" / "`" / "|" / "~" +# / DIGIT / ALPHA +# ; any VCHAR, except delimiters +token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields +# field-name = token +field_name = token + +# The standard says: +# +# field-value = *( field-content / obs-fold ) +# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] +# field-vchar = VCHAR / obs-text +# obs-fold = CRLF 1*( SP / HTAB ) +# ; obsolete line folding +# ; see Section 3.2.4 +# +# https://tools.ietf.org/html/rfc5234#appendix-B.1 +# +# VCHAR = %x21-7E +# ; visible (printing) characters +# +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string +# obs-text = %x80-FF +# +# However, the standard definition of field-content is WRONG! It disallows +# fields containing a single visible character surrounded by whitespace, +# e.g. "foo a bar". +# +# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 +# +# So our definition of field_content attempts to fix it up... +# +# Also, we allow lots of control characters, because apparently people assume +# that they're legal in practice (e.g., google analytics makes cookies with +# \x01 in them!): +# https://github.com/python-hyper/h11/issues/57 +# We still don't allow NUL or whitespace, because those are often treated as +# meta-characters and letting them through can lead to nasty issues like SSRF. +vchar = r"[\x21-\x7e]" +vchar_or_obs_text = r"[^\x00\s]" +field_vchar = vchar_or_obs_text +field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) + +# We handle obs-fold at a different level, and our fixed-up field_content +# already grows to swallow the whole value, so ? instead of * +field_value = r"({field_content})?".format(**globals()) + +# header-field = field-name ":" OWS field-value OWS +header_field = ( + r"(?P{field_name})" + r":" + r"{OWS}" + r"(?P{field_value})" + r"{OWS}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line +# +# request-line = method SP request-target SP HTTP-version CRLF +# method = token +# HTTP-version = HTTP-name "/" DIGIT "." DIGIT +# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive +# +# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full +# URL, host+port (for connect), or even "*", but in any case we are guaranteed +# that it contists of the visible printing characters. +method = token +request_target = r"{vchar}+".format(**globals()) +http_version = r"HTTP/(?P[0-9]\.[0-9])" +request_line = ( + r"(?P{method})" + r" " + r"(?P{request_target})" + r" " + r"{http_version}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line +# +# status-line = HTTP-version SP status-code SP reason-phrase CRLF +# status-code = 3DIGIT +# reason-phrase = *( HTAB / SP / VCHAR / obs-text ) +status_code = r"[0-9]{3}" +reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals()) +status_line = ( + r"{http_version}" + r" " + r"(?P{status_code})" + # However, there are apparently a few too many servers out there that just + # leave out the reason phrase: + # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036 + # https://github.com/seanmonstar/httparse/issues/29 + # so make it optional. ?: is a non-capturing group. + r"(?: (?P{reason_phrase}))?".format(**globals()) +) + +HEXDIG = r"[0-9A-Fa-f]" +# Actually +# +# chunk-size = 1*HEXDIG +# +# but we impose an upper-limit to avoid ridiculosity. 
len(str(2**64)) == 20 +chunk_size = r"({HEXDIG}){{1,20}}".format(**globals()) +# Actually +# +# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) +# +# but we aren't parsing the things so we don't really care. +chunk_ext = r";.*" +chunk_header = ( + r"(?P<chunk_size>{chunk_size})" + r"(?P<chunk_ext>{chunk_ext})?" + r"\r\n".format(**globals()) +) diff --git a/venv/lib/python3.10/site-packages/h11/_connection.py b/venv/lib/python3.10/site-packages/h11/_connection.py new file mode 100644 index 0000000..d11386f --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/_connection.py @@ -0,0 +1,631 @@ +# This contains the main Connection class. Everything in h11 revolves around +# this. +from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union + +from ._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from ._headers import get_comma_header, has_expect_100_continue, set_comma_header +from ._readers import READERS, ReadersType +from ._receivebuffer import ReceiveBuffer +from ._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + ConnectionState, + DONE, + ERROR, + MIGHT_SWITCH_PROTOCOL, + SEND_BODY, + SERVER, + SWITCHED_PROTOCOL, +) +from ._util import ( # Import the internal things we need + LocalProtocolError, + RemoteProtocolError, + Sentinel, +) +from ._writers import WRITERS, WritersType + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = ["Connection", "NEED_DATA", "PAUSED"] + + +class NEED_DATA(Sentinel, metaclass=Sentinel): + pass + + +class PAUSED(Sentinel, metaclass=Sentinel): + pass + + +# If we ever have this much buffered without it making a complete parseable +# event, we error out. The only time we really buffer is when reading the +# request/response line + headers together, so this is effectively the limit on +# the size of that. +# +# Some precedents for defaults: +# - node.js: 80 * 1024 +# - tomcat: 8 * 1024 +# - IIS: 16 * 1024 +# - Apache: <8 KiB per line> +DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 + +# RFC 7230's rules for connection lifecycles: +# - If either side says they want to close the connection, then the connection +# must close. +# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close +# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive +# (and even this is a mess -- e.g. if you're implementing a proxy then +# sending Connection: keep-alive is forbidden). +# +# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So +# our rule is: +# - If someone says Connection: close, we will close +# - If someone uses HTTP/1.0, we will close. +def _keep_alive(event: Union[Request, Response]) -> bool: + connection = get_comma_header(event.headers, b"connection") + if b"close" in connection: + return False + if getattr(event, "http_version", b"1.1") < b"1.1": + return False + return True + + +def _body_framing( + request_method: bytes, event: Union[Request, Response] +) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: + # Called when we enter SEND_BODY to figure out framing information for + # this body. + # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects.
+ # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. + if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. + assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection: + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If + you're implementing a server, pass :data:`h11.SERVER`. + + max_incomplete_event_size (int): + The maximum number of bytes we're willing to buffer of an + incomplete event. In practice this mostly sets a limit on the + maximum size of the request/response line + headers. If this is + exceeded, then :meth:`next_event` will raise + :exc:`RemoteProtocolError`. + + """ + + def __init__( + self, + our_role: Type[Sentinel], + max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, + ) -> None: + self._max_incomplete_event_size = max_incomplete_event_size + # State and role tracking + if our_role not in (CLIENT, SERVER): + raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role)) + self.our_role = our_role + self.their_role: Type[Sentinel] + if our_role is CLIENT: + self.their_role = SERVER + else: + self.their_role = CLIENT + self._cstate = ConnectionState() + + # Callables for converting data->events or vice-versa given the + # current state + self._writer = self._get_io_object(self.our_role, None, WRITERS) + self._reader = self._get_io_object(self.their_role, None, READERS) + + # Holds any unprocessed received data + self._receive_buffer = ReceiveBuffer() + # If this is true, then it indicates that the incoming connection was + # closed *after* the end of whatever's in self._receive_buffer: + self._receive_buffer_closed = False + + # Extra bits of state that don't fit into the state machine. + # + # These two are only used to interpret framing headers for figuring + # out how to read/write response bodies. their_http_version is also + # made available as a convenient public API. 
+ self.their_http_version: Optional[bytes] = None + self._request_method: Optional[bytes] = None + # This is pure flow-control and doesn't at all affect the set of legal + # transitions, so no need to bother ConnectionState with it: + self.client_is_waiting_for_100_continue = False + + @property + def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]: + """A dictionary like:: + + {CLIENT: <client state>, SERVER: <server state>} + + See :ref:`state-machine` for details. + + """ + return dict(self._cstate.states) + + @property + def our_state(self) -> Type[Sentinel]: + """The current state of whichever role we are playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.our_role] + + @property + def their_state(self) -> Type[Sentinel]: + """The current state of whichever role we are NOT playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.their_role] + + @property + def they_are_waiting_for_100_continue(self) -> bool: + return self.their_role is CLIENT and self.client_is_waiting_for_100_continue + + def start_next_cycle(self) -> None: + """Attempt to reset our connection state for a new request/response + cycle. + + If both client and server are in :data:`DONE` state, then resets them + both to :data:`IDLE` state in preparation for a new request/response + cycle on this same connection. Otherwise, raises a + :exc:`LocalProtocolError`. + + See :ref:`keepalive-and-pipelining`. + + """ + old_states = dict(self._cstate.states) + self._cstate.start_next_cycle() + self._request_method = None + # self.their_http_version gets left alone, since it presumably lasts + # beyond a single request/response cycle + assert not self.client_is_waiting_for_100_continue + self._respond_to_state_changes(old_states) + + def _process_error(self, role: Type[Sentinel]) -> None: + old_states = dict(self._cstate.states) + self._cstate.process_error(role) + self._respond_to_state_changes(old_states) + + def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: + if type(event) is InformationalResponse and event.status_code == 101: + return _SWITCH_UPGRADE + if type(event) is Response: + if ( + _SWITCH_CONNECT in self._cstate.pending_switch_proposals + and 200 <= event.status_code < 300 + ): + return _SWITCH_CONNECT + return None + + # All events go through here + def _process_event(self, role: Type[Sentinel], event: Event) -> None: + # First, pass the event through the state machine to make sure it + # succeeds. + old_states = dict(self._cstate.states) + if role is CLIENT and type(event) is Request: + if event.method == b"CONNECT": + self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) + if get_comma_header(event.headers, b"upgrade"): + self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) + server_switch_event = None + if role is SERVER: + server_switch_event = self._server_switch_event(event) + self._cstate.process_event(role, type(event), server_switch_event) + + # Then perform the updates triggered by it. + + if type(event) is Request: + self._request_method = event.method + + if role is self.their_role and type(event) in ( + Request, + Response, + InformationalResponse, + ): + event = cast(Union[Request, Response, InformationalResponse], event) + self.their_http_version = event.http_version + + # Keep alive handling + # + # RFC 7230 doesn't really say what one should do if Connection: close + # shows up on a 1xx InformationalResponse. I think the idea is that + # this is not supposed to happen.
In any case, if it does happen, we + # ignore it. + if type(event) in (Request, Response) and not _keep_alive( + cast(Union[Request, Response], event) + ): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object( + self, + role: Type[Sentinel], + event: Optional[Event], + io_dict: Union[ReadersType, WritersType], + ) -> Optional[Callable[..., Any]]: + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. + framing_type, args = _body_framing( + cast(bytes, self._request_method), cast(Union[Request, Response], event) + ) + return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) # type: ignore + + # This must be called after any action that might have caused + # self._cstate.states to change. + def _respond_to_state_changes( + self, + old_states: Dict[Type[Sentinel], Type[Sentinel]], + event: Optional[Event] = None, + ) -> None: + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self) -> Tuple[bytes, bool]: + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data: bytes) -> None: + """Add data to our internal receive buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. + + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. + + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. 
+ + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event(self) -> Union[Event, Type[Sentinel]]: + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). + if hasattr(self._reader, "read_eof"): + event = self._reader.read_eof() # type: ignore[attr-defined] + else: + event = ConnectionClosed() + if event is None: + event = NEED_DATA + return event # type: ignore[no-any-return] + + def next_event(self) -> Union[Event, Type[Sentinel]]: + """Parse the next event out of our receive buffer, update our internal + state, and return it. + + This is a mutating operation -- think of it like calling :func:`next` + on an iterator. + + Returns: + : One of three things: + + 1) An event object -- see :ref:`events`. + + 2) The special constant :data:`NEED_DATA`, which indicates that + you need to read more data from your socket and pass it to + :meth:`receive_data` before this method will be able to return + any more events. + + 3) The special constant :data:`PAUSED`, which indicates that we + are not in a state where we can process incoming data (usually + because the peer has finished their part of the current + request/response cycle, and you have not yet called + :meth:`start_next_cycle`). See :ref:`flow-control` for details. + + Raises: + RemoteProtocolError: + The peer has misbehaved. You should close the connection + (possibly after sending some kind of 4xx response). + + Once this method returns :class:`ConnectionClosed` once, then all + subsequent calls will also return :class:`ConnectionClosed`. + + If this method raises any exception besides :exc:`RemoteProtocolError` + then that's a bug -- if it happens please file a bug report! + + If this method raises any exception then it also sets + :attr:`Connection.their_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. 
+ + """ + + if self.their_state is ERROR: + raise RemoteProtocolError("Can't receive data when peer state is ERROR") + try: + event = self._extract_next_receive_event() + if event not in [NEED_DATA, PAUSED]: + self._process_event(self.their_role, cast(Event, event)) + if event is NEED_DATA: + if len(self._receive_buffer) > self._max_incomplete_event_size: + # 431 is "Request header fields too large" which is pretty + # much the only situation where we can get here + raise RemoteProtocolError( + "Receive buffer too long", error_status_hint=431 + ) + if self._receive_buffer_closed: + # We're still trying to complete some event, but that's + # never going to happen because no more data is coming + raise RemoteProtocolError("peer unexpectedly closed connection") + return event + except BaseException as exc: + self._process_error(self.their_role) + if isinstance(exc, LocalProtocolError): + exc._reraise_as_remote_protocol_error() + else: + raise + + def send(self, event: Event) -> Optional[bytes]: + """Convert a high-level event into bytes that can be sent to the peer, + while updating our internal state machine. + + Args: + event: The :ref:`event ` to send. + + Returns: + If ``type(event) is ConnectionClosed``, then returns + ``None``. Otherwise, returns a :term:`bytes-like object`. + + Raises: + LocalProtocolError: + Sending this event at this time would violate our + understanding of the HTTP/1.1 protocol. + + If this method raises any exception then it also sets + :attr:`Connection.our_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. + + """ + data_list = self.send_with_data_passthrough(event) + if data_list is None: + return None + else: + return b"".join(data_list) + + def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: + """Identical to :meth:`send`, except that in situations where + :meth:`send` returns a single :term:`bytes-like object`, this instead + returns a list of them -- and when sending a :class:`Data` event, this + list is guaranteed to contain the exact object you passed in as + :attr:`Data.data`. See :ref:`sendfile` for discussion. + + """ + if self.our_state is ERROR: + raise LocalProtocolError("Can't send data when our state is ERROR") + try: + if type(event) is Response: + event = self._clean_up_response_headers_for_sending(event) + # We want to call _process_event before calling the writer, + # because if someone tries to do something invalid then this will + # give a sensible error message, while our writers all just assume + # they will only receive valid events. But, _process_event might + # change self._writer. So we have to do a little dance: + writer = self._writer + self._process_event(self.our_role, event) + if type(event) is ConnectionClosed: + return None + else: + # In any situation where writer is None, process_event should + # have raised ProtocolError + assert writer is not None + data_list: List[bytes] = [] + writer(event, data_list.append) + return data_list + except: + self._process_error(self.our_role) + raise + + def send_failed(self) -> None: + """Notify the state machine that we failed to send the data it gave + us. + + This causes :attr:`Connection.our_state` to immediately become + :data:`ERROR` -- see :ref:`error-handling` for discussion. + + """ + self._process_error(self.our_role) + + # When sending a Response, we take responsibility for a few things: + # + # - Sometimes you MUST set Connection: close. We take care of those + # times. 
(You can also set it yourself if you want, and if you do then + # we'll respect that and close the connection at the right time. But you + # don't have to worry about that unless you want to.) + # + # - The user has to set Content-Length if they want it. Otherwise, for + # responses that have bodies (e.g. not HEAD), then we will automatically + # select the right mechanism for streaming a body of unknown length, + # which depends on the peer's HTTP version. + # + # This function's *only* responsibility is making sure headers are set up + # right -- everything downstream just looks at the headers. There are no + # side channels. + def _clean_up_response_headers_for_sending(self, response: Response) -> Response: + assert type(response) is Response + + headers = response.headers + need_close = False + + # HEAD requests need some special handling: they always act like they + # have Content-Length: 0, and that's how _body_framing treats + # them. But their headers are supposed to match what we would send if + # the request was a GET. (Technically there is one deviation allowed: + # we're allowed to leave out the framing headers -- see + # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as + # easy to get them right.) + method_for_choosing_headers = cast(bytes, self._request_method) + if method_for_choosing_headers == b"HEAD": + method_for_choosing_headers = b"GET" + framing_type, _ = _body_framing(method_for_choosing_headers, response) + if framing_type in ("chunked", "http/1.0"): + # This response has a body of unknown length. + # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked + # If our peer is HTTP/1.0, we use no framing headers, and close the + # connection afterwards. + # + # Make sure to clear Content-Length (in principle user could have + # set both and then we ignored Content-Length b/c + # Transfer-Encoding overwrote it -- this would be naughty of them, + # but the HTTP spec says that if our peer does this then we have + # to fix it instead of erroring out, so we'll accord the user the + # same respect). + headers = set_comma_header(headers, b"content-length", []) + if self.their_http_version is None or self.their_http_version < b"1.1": + # Either we never got a valid request and are sending back an + # error (their_http_version is None), so we assume the worst; + # or else we did get a valid HTTP/1.0 request, so we know that + # they don't understand chunked encoding. + headers = set_comma_header(headers, b"transfer-encoding", []) + # This is actually redundant ATM, since currently we + # unconditionally disable keep-alive when talking to HTTP/1.0 + # peers.
But let's be defensive just in case we add + # Connection: keep-alive support later: + if self._request_method != b"HEAD": + need_close = True + else: + headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) + + if not self._cstate.keep_alive or need_close: + # Make sure Connection: close is set + connection = set(get_comma_header(headers, b"connection")) + connection.discard(b"keep-alive") + connection.add(b"close") + headers = set_comma_header(headers, b"connection", sorted(connection)) + + return Response( + headers=headers, + status_code=response.status_code, + http_version=response.http_version, + reason=response.reason, + ) diff --git a/venv/lib/python3.10/site-packages/h11/_events.py b/venv/lib/python3.10/site-packages/h11/_events.py new file mode 100644 index 0000000..075bf8a --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/_events.py @@ -0,0 +1,369 @@ +# High level events that make up HTTP/1.1 conversations. Loosely inspired by +# the corresponding events in hyper-h2: +# +# http://python-hyper.org/h2/en/stable/api.html#events +# +# Don't subclass these. Stuff will break. + +import re +from abc import ABC +from dataclasses import dataclass, field +from typing import Any, cast, Dict, List, Tuple, Union + +from ._abnf import method, request_target +from ._headers import Headers, normalize_and_validate +from ._util import bytesify, LocalProtocolError, validate + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "Event", + "Request", + "InformationalResponse", + "Response", + "Data", + "EndOfMessage", + "ConnectionClosed", +] + +method_re = re.compile(method.encode("ascii")) +request_target_re = re.compile(request_target.encode("ascii")) + + +class Event(ABC): + """ + Base class for h11 events. + """ + + __slots__ = () + + +@dataclass(init=False, frozen=True) +class Request(Event): + """The beginning of an HTTP request. + + Fields: + + .. attribute:: method + + An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte + string. :term:`Bytes-like objects <bytes-like object>` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: target + + The target of an HTTP request, e.g. ``b"/index.html"``, or one of the + more exotic formats described in `RFC 7230, section 5.3 + <https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte + string. :term:`Bytes-like objects <bytes-like object>` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules <headers-format>` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + <http_version-format>` for details.
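+
+    For example (a sketch; the header values are illustrative)::
+
+        import h11
+
+        # Host is mandatory for HTTP/1.1, so omitting it here would raise
+        # LocalProtocolError.
+        req = h11.Request(
+            method="GET",
+            target="/index.html",
+            headers=[("Host", "example.com")],
+        )
+        assert req.method == b"GET"  # ascii str arguments are bytesified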
+ + """ + + __slots__ = ("method", "headers", "target", "http_version") + + method: bytes + headers: Headers + target: bytes + http_version: bytes + + def __init__( + self, + *, + method: Union[bytes, str], + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + target: Union[bytes, str], + http_version: Union[bytes, str] = b"1.1", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "method", bytesify(method)) + object.__setattr__(self, "target", bytesify(target)) + object.__setattr__(self, "http_version", bytesify(http_version)) + else: + object.__setattr__(self, "method", method) + object.__setattr__(self, "target", target) + object.__setattr__(self, "http_version", http_version) + + # "A server MUST respond with a 400 (Bad Request) status code to any + # HTTP/1.1 request message that lacks a Host header field and to any + # request message that contains more than one Host header field or a + # Host header field with an invalid field-value." + # -- https://tools.ietf.org/html/rfc7230#section-5.4 + host_count = 0 + for name, value in self.headers: + if name == b"host": + host_count += 1 + if self.http_version == b"1.1" and host_count == 0: + raise LocalProtocolError("Missing mandatory Host: header") + if host_count > 1: + raise LocalProtocolError("Found multiple Host: headers") + + validate(method_re, self.method, "Illegal method characters") + validate(request_target_re, self.target, "Illegal target characters") + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class _ResponseBase(Event): + __slots__ = ("headers", "http_version", "reason", "status_code") + + headers: Headers + http_version: bytes + reason: bytes + status_code: int + + def __init__( + self, + *, + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + status_code: int, + http_version: Union[bytes, str] = b"1.1", + reason: Union[bytes, str] = b"", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "reason", bytesify(reason)) + object.__setattr__(self, "http_version", bytesify(http_version)) + if not isinstance(status_code, int): + raise LocalProtocolError("status code must be integer") + # Because IntEnum objects are instances of int, but aren't + # duck-compatible (sigh), see gh-72. + object.__setattr__(self, "status_code", int(status_code)) + else: + object.__setattr__(self, "reason", reason) + object.__setattr__(self, "http_version", http_version) + object.__setattr__(self, "status_code", status_code) + + self.__post_init__() + + def __post_init__(self) -> None: + pass + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class InformationalResponse(_ResponseBase): + """An HTTP informational response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`InformationalResponse`, this is always in the range [100, + 200). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. 
See + :ref:`the header normalization rules <headers-format>` for + details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + <http_version-format>` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (100 <= self.status_code < 200): + raise LocalProtocolError( + "InformationalResponse status_code should be in range " + "[100, 200), not {}".format(self.status_code) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Response(_ResponseBase): + """The beginning of an HTTP response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For a + :class:`Response`, this is always in the range [200, + 1000). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules <headers-format>` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + <http_version-format>` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (200 <= self.status_code < 1000): + raise LocalProtocolError( + "Response status_code should be in range [200, 1000), not {}".format( + self.status_code + ) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Data(Event): + """Part of an HTTP message body. + + Fields: + + .. attribute:: data + + A :term:`bytes-like object` containing part of a message body. Or, if + using the ``combine=False`` argument to :meth:`Connection.send`, then + any object that your socket writing code knows what to do with, and for + which calling :func:`len` returns the number of bytes that will be + written -- see :ref:`sendfile` for details. + + .. attribute:: chunk_start + + A marker that indicates whether this data object is from the start of a + chunked transfer encoding chunk. This field is ignored when a Data + event is provided to :meth:`Connection.send`: it is only valid on + events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + .. attribute:: chunk_end + + A marker that indicates whether this data object is the last for a + given chunked transfer encoding chunk. This field is ignored when + a Data event is provided to :meth:`Connection.send`: it is only valid + on events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + """ + + __slots__ = ("data", "chunk_start", "chunk_end") + + data: bytes + chunk_start: bool + chunk_end: bool + + def __init__( + self, data: bytes, chunk_start: bool = False, chunk_end: bool = False + ) -> None: + object.__setattr__(self, "data", data) + object.__setattr__(self, "chunk_start", chunk_start) + object.__setattr__(self, "chunk_end", chunk_end) + + # This is an unhashable type.
+ __hash__ = None # type: ignore + + +# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that +# are forbidden to be sent in a trailer, since processing them as if they were +# present in the header section might bypass external security filters." +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part +# Unfortunately, the list of forbidden fields is long and vague :-/ +@dataclass(init=False, frozen=True) +class EndOfMessage(Event): + """The end of an HTTP message. + + Fields: + + .. attribute:: headers + + Default value: ``[]`` + + Any trailing headers attached to this message, represented as a list of + (name, value) pairs. See :ref:`the header normalization rules + <headers-format>` for details. + + Must be empty unless ``Transfer-Encoding: chunked`` is in use. + + """ + + __slots__ = ("headers",) + + headers: Headers + + def __init__( + self, + *, + headers: Union[ + Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None + ] = None, + _parsed: bool = False, + ) -> None: + super().__init__() + if headers is None: + headers = Headers([]) + elif not isinstance(headers, Headers): + headers = normalize_and_validate(headers, _parsed=_parsed) + + object.__setattr__(self, "headers", headers) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(frozen=True) +class ConnectionClosed(Event): + """This event indicates that the sender has closed their outgoing + connection. + + Note that this does not necessarily mean that they can't *receive* further + data, because TCP connections are composed of two one-way channels which + can be closed independently. See :ref:`closing` for details. + + No fields. + """ + + pass diff --git a/venv/lib/python3.10/site-packages/h11/_headers.py b/venv/lib/python3.10/site-packages/h11/_headers.py new file mode 100644 index 0000000..acc4596 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/_headers.py @@ -0,0 +1,278 @@ +import re +from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union + +from ._abnf import field_name, field_value +from ._util import bytesify, LocalProtocolError, validate + +if TYPE_CHECKING: + from ._events import Request + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + + +# Facts +# ----- +# +# Headers are: +# keys: case-insensitive ascii +# values: mixture of ascii and raw bytes +# +# "Historically, HTTP has allowed field content with text in the ISO-8859-1 +# charset [ISO-8859-1], supporting other charsets only through use of +# [RFC2047] encoding. In practice, most HTTP header field values use only a +# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD +# limit their field values to US-ASCII octets. A recipient SHOULD treat other +# octets in field content (obs-text) as opaque data."
+# And it deprecates all non-ascii values +# +# Leading/trailing whitespace in header names is forbidden +# +# Values get leading/trailing whitespace stripped +# +# Content-Disposition actually needs to contain unicode semantically; to +# accomplish this it has a terrifically weird way of encoding the filename +# itself as ascii (and even this still has lots of cross-browser +# incompatibilities) +# +# Order is important: +# "a proxy MUST NOT change the order of these field values when forwarding a +# message" +# (and there are several headers where the order indicates a preference) +# +# Multiple occurrences of the same header: +# "A sender MUST NOT generate multiple header fields with the same field name +# in a message unless either the entire field value for that header field is +# defined as a comma-separated list [or the header is Set-Cookie which gets a +# special exception]" - RFC 7230. (cookies are in RFC 6265) +# +# So every header aside from Set-Cookie can be merged by b", ".join if it +# occurs repeatedly. But, of course, they can't necessarily be split by +# .split(b","), because quoting. +# +# Given all this mess (case insensitive, duplicates allowed, order is +# important, ...), there doesn't appear to be any standard way to handle +# headers in Python -- they're almost like dicts, but... actually just +# aren't. For now we punt and just use a super simple representation: headers +# are a list of pairs +# +# [(name1, value1), (name2, value2), ...] +# +# where all entries are bytestrings, names are lowercase and have no +# leading/trailing whitespace, and values are bytestrings with no +# leading/trailing whitespace. Searching and updating are done via naive O(n) +# methods. +# +# Maybe a dict-of-lists would be better? + +_content_length_re = re.compile(br"[0-9]+") +_field_name_re = re.compile(field_name.encode("ascii")) +_field_value_re = re.compile(field_value.encode("ascii")) + + +class Headers(Sequence[Tuple[bytes, bytes]]): + """ + A list-like interface that allows iterating over headers as byte-pairs + of (lowercased-name, value). + + Internally we actually store the representation as three-tuples, + including both the raw original casing, in order to preserve casing + over-the-wire, and the lowercased name, for case-insensitive comparisons.
+ + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert r.headers == [ + (b"host", b"example.org"), + (b"connection", b"keep-alive") + ] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive") + ] + """ + + __slots__ = "_full_items" + + def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None: + self._full_items = full_items + + def __bool__(self) -> bool: + return bool(self._full_items) + + def __eq__(self, other: object) -> bool: + return list(self) == list(other) # type: ignore + + def __len__(self) -> int: + return len(self._full_items) + + def __repr__(self) -> str: + return "<Headers(%s)>" % repr(list(self)) + + def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override] + _, name, value = self._full_items[idx] + return (name, value) + + def raw_items(self) -> List[Tuple[bytes, bytes]]: + return [(raw_name, value) for raw_name, _, value in self._full_items] + + +HeaderTypes = Union[ + List[Tuple[bytes, bytes]], + List[Tuple[bytes, str]], + List[Tuple[str, bytes]], + List[Tuple[str, str]], +] + + +@overload +def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers: + ... + + +@overload +def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers: + ... + + +@overload +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + ... + + +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + new_headers = [] + seen_content_length = None + saw_transfer_encoding = False + for name, value in headers: + # For headers coming out of the parser, we can safely skip some steps, + # because it always returns bytes and has already run these regexes + # over the data: + if not _parsed: + name = bytesify(name) + value = bytesify(value) + validate(_field_name_re, name, "Illegal header name {!r}", name) + validate(_field_value_re, value, "Illegal header value {!r}", value) + assert isinstance(name, bytes) + assert isinstance(value, bytes) + + raw_name = name + name = name.lower() + if name == b"content-length": + lengths = {length.strip() for length in value.split(b",")} + if len(lengths) != 1: + raise LocalProtocolError("conflicting Content-Length headers") + value = lengths.pop() + validate(_content_length_re, value, "bad Content-Length") + if seen_content_length is None: + seen_content_length = value + new_headers.append((raw_name, name, value)) + elif seen_content_length != value: + raise LocalProtocolError("conflicting Content-Length headers") + elif name == b"transfer-encoding": + # "A server that receives a request message with a transfer coding + # it does not understand SHOULD respond with 501 (Not + # Implemented)."
+ # https://tools.ietf.org/html/rfc7230#section-3.3.1 + if saw_transfer_encoding: + raise LocalProtocolError( + "multiple Transfer-Encoding headers", error_status_hint=501 + ) + # "All transfer-coding names are case-insensitive" + # -- https://tools.ietf.org/html/rfc7230#section-4 + value = value.lower() + if value != b"chunked": + raise LocalProtocolError( + "Only Transfer-Encoding: chunked is supported", + error_status_hint=501, + ) + saw_transfer_encoding = True + new_headers.append((raw_name, name, value)) + else: + new_headers.append((raw_name, name, value)) + return Headers(new_headers) + + +def get_comma_header(headers: Headers, name: bytes) -> List[bytes]: + # Should only be used for headers whose value is a list of + # comma-separated, case-insensitive values. + # + # The header name `name` is expected to be lower-case bytes. + # + # Connection: meets these criteria (including case insensitivity). + # + # Content-Length: technically is just a single value (1*DIGIT), but the + # standard makes reference to implementations that do multiple values, and + # using this doesn't hurt. Ditto, case insensitivity doesn't hurt things either + # way. + # + # Transfer-Encoding: is more complex (allows for quoted strings), so + # splitting on , is actually wrong. For example, this is legal: + # + # Transfer-Encoding: foo; options="1,2", chunked + # + # and should be parsed as + # + # foo; options="1,2" + # chunked + # + # but this naive function will parse it as + # + # foo; options="1 + # 2" + # chunked + # + # However, this is okay because the only thing we are going to do with + # any Transfer-Encoding is reject ones that aren't just "chunked", so + # both of these will be treated the same anyway. + # + # Expect: the only legal value is the literal string + # "100-continue". Splitting on commas is harmless. Case insensitive. + # + out: List[bytes] = [] + for _, found_name, found_raw_value in headers._full_items: + if found_name == name: + found_raw_value = found_raw_value.lower() + for found_split_value in found_raw_value.split(b","): + found_split_value = found_split_value.strip() + if found_split_value: + out.append(found_split_value) + return out + + +def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers: + # The header name `name` is expected to be lower-case bytes. + # + # Note that when we store the header we use title casing for the header + # names, in order to match the conventional HTTP header style. + # + # Simply calling `.title()` is a blunt approach, but it's correct + # here given the cases where we're using `set_comma_header`... + # + # Connection, Content-Length, Transfer-Encoding. + new_headers: List[Tuple[bytes, bytes]] = [] + for found_raw_name, found_name, found_raw_value in headers._full_items: + if found_name != name: + new_headers.append((found_raw_name, found_raw_value)) + for new_value in new_values: + new_headers.append((name.title(), new_value)) + return normalize_and_validate(new_headers) + + +def has_expect_100_continue(request: "Request") -> bool: + # https://tools.ietf.org/html/rfc7231#section-5.1.1 + # "A server that receives a 100-continue expectation in an HTTP/1.0 request + # MUST ignore that expectation."
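+    # For example (illustrative values, not from the h11 sources): a
+    # request with headers [("Expect", "100-continue")] and http_version
+    # b"1.1" makes this function return True, while the identical request
+    # at HTTP/1.0 returns False, per the RFC text above.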
+ if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/venv/lib/python3.10/site-packages/h11/_readers.py b/venv/lib/python3.10/site-packages/h11/_readers.py new file mode 100644 index 0000000..a036d79 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/_readers.py @@ -0,0 +1,249 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re +from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import Data, EndOfMessage, InformationalResponse, Request, Response +from ._receivebuffer import ReceiveBuffer +from ._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, +) +from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) + +# Remember that this has to run in O(n) time -- so e.g. the bytearray cast is +# critical. 
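+# For reference, an obs-folded header (the X-Example name below is
+# hypothetical) spreads one field value across several physical lines, with
+# continuation lines starting in whitespace:
+#
+#     X-Example: first part
+#      second part
+#
+# _obsolete_line_fold() below joins such continuations with a single space,
+# yielding the equivalent of b"X-Example: first part second part".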
+obs_fold_re = re.compile(br"[ \t]+") + + +def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: + it = iter(lines) + last: Optional[bytes] = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines( + lines: Iterable[bytes], +) -> Iterable[Tuple[bytes, bytes]]: + for line in _obsolete_line_fold(lines): + matches = validate(header_field_re, line, "illegal header line: {!r}", line) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate( + request_line_re, lines[0], "illegal request line: {!r}", lines[0] + ) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server( + buf: ReceiveBuffer, +) -> Union[InformationalResponse, Response, None]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no response line received") + matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) + http_version = ( + b"1.1" if matches["http_version"] is None else matches["http_version"] + ) + reason = b"" if matches["reason"] is None else matches["reason"] + status_code = int(matches["status_code"]) + class_: Union[Type[InformationalResponse], Type[Response]] = ( + InformationalResponse if status_code < 200 else Response + ) + return class_( + headers=list(_decode_header_lines(lines[1:])), + _parsed=True, + status_code=status_code, + reason=reason, + http_version=http_version, + ) + + +class ContentLengthReader: + def __init__(self, length: int) -> None: + self._length = length + self._remaining = length + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader: + def __init__(self) -> None: + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n; if + # this is >0 then we discard that many bytes before resuming regular + # de-chunkification. 
+ self._bytes_to_discard = 0 + self._reading_trailer = False + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard > 0: + data = buf.maybe_extract_at_most(self._bytes_to_discard) + if data is None: + return None + self._bytes_to_discard -= len(data) + if self._bytes_to_discard > 0: + return None + # else, fall through and read some more + assert self._bytes_to_discard == 0 + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_next_line() + if chunk_header is None: + return None + matches = validate( + chunk_header_re, + chunk_header, + "illegal chunk header: {!r}", + chunk_header, + ) + # XX FIXME: we discard chunk extensions. Does anyone care? + self._bytes_in_chunk = int(matches["chunk_size"], base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = 2 + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader: + def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self) -> EndOfMessage: + return EndOfMessage() + + +def expect_nothing(buf: ReceiveBuffer) -> None: + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +ReadersType = Dict[ + Union[Sentinel, Tuple[Sentinel, Sentinel]], + Union[Callable[..., Any], Dict[str, Callable[..., Any]]], +] + +READERS: ReadersType = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/venv/lib/python3.10/site-packages/h11/_receivebuffer.py b/venv/lib/python3.10/site-packages/h11/_receivebuffer.py new file mode 100644 index 0000000..e5c4e08 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/_receivebuffer.py @@ -0,0 +1,153 @@ +import re +import sys +from typing import List, Optional, Union + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), +# or wait until there is one +# - read at-most-N bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a stream of uninterrupted processing, advance offset instead +# of constantly 
copying +# WARNING: +# - I haven't benchmarked or profiled any of this yet. +# +# Note that starting in Python 3.4, deleting the initial n bytes from a +# bytearray is amortized O(n), thanks to some excellent work by Antoine +# Martin: +# +# https://bugs.python.org/issue19087 +# +# This means that if we only supported 3.4+, we could get rid of the code here +# involving self._start and self.compress, because it's doing exactly the same +# thing that bytearray now does internally. +# +# BUT unfortunately, we still support 2.7, and reading short segments out of a +# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually +# delete this code. Yet: +# +# https://pythonclock.org/ +# +# (Two things to double-check first though: make sure PyPy also has the +# optimization, and benchmark to make sure it's a win, since we do have a +# slightly clever thing where we delay calling compress() until we've +# processed a whole event, which could in theory be slightly more efficient +# than the internal bytearray support.) +blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) + + +class ReceiveBuffer: + def __init__(self) -> None: + self._data = bytearray() + self._next_line_search = 0 + self._multiple_lines_search = 0 + + def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": + self._data += byteslike + return self + + def __bool__(self) -> bool: + return bool(len(self)) + + def __len__(self) -> int: + return len(self._data) + + # for @property unprocessed_data + def __bytes__(self) -> bytes: + return bytes(self._data) + + def _extract(self, count: int) -> bytearray: + # extracting an initial slice of the data buffer and return it + out = self._data[:count] + del self._data[:count] + + self._next_line_search = 0 + self._multiple_lines_search = 0 + + return out + + def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: + """ + Extract a fixed number of bytes from the buffer. + """ + out = self._data[:count] + if not out: + return None + + return self._extract(count) + + def maybe_extract_next_line(self) -> Optional[bytearray]: + """ + Extract the first line, if it is completed in the buffer. + """ + # Only search in buffer space that we've not already looked at. + search_start_index = max(0, self._next_line_search - 1) + partial_idx = self._data.find(b"\r\n", search_start_index) + + if partial_idx == -1: + self._next_line_search = len(self._data) + return None + + # + 2 is to compensate len(b"\r\n") + idx = partial_idx + 2 + + return self._extract(idx) + + def maybe_extract_lines(self) -> Optional[List[bytearray]]: + """ + Extract everything up to the first blank line, and return a list of lines. + """ + # Handle the case where we have an immediate empty line. + if self._data[:1] == b"\n": + self._extract(1) + return [] + + if self._data[:2] == b"\r\n": + self._extract(2) + return [] + + # Only search in buffer space that we've not already looked at. + match = blank_line_regex.search(self._data, self._multiple_lines_search) + if match is None: + self._multiple_lines_search = max(0, len(self._data) - 2) + return None + + # Truncate the buffer and return it. + idx = match.span(0)[-1] + out = self._extract(idx) + lines = out.split(b"\n") + + for line in lines: + if line.endswith(b"\r"): + del line[-1] + + assert lines[-2] == lines[-1] == b"" + + del lines[-2:] + + return lines + + # In theory we should wait until `\r\n` before starting to validate + # incoming data. 
However it's interesting to detect (very) invalid data + # early given they might not even contain `\r\n` at all (hence only + # timeout will get rid of them). + # This is not a 100% effective detection but more of a cheap sanity check + # allowing for early abort in some useful cases. + # This is especially interesting when peer is messing up with HTTPS and + # sent us a TLS stream where we were expecting plain HTTP given all + # versions of TLS so far start handshake with a 0x16 message type code. + def is_next_line_obviously_invalid_request_line(self) -> bool: + try: + # HTTP header line must not contain non-printable characters + # and should not start with a space + return self._data[0] < 0x21 + except IndexError: + return False diff --git a/venv/lib/python3.10/site-packages/h11/_state.py b/venv/lib/python3.10/site-packages/h11/_state.py new file mode 100644 index 0000000..2790768 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/_state.py @@ -0,0 +1,363 @@ +################################################################ +# The core state machine +################################################################ +# +# Rule 1: everything that affects the state machine and state transitions must +# live here in this file. As much as possible goes into the table-based +# representation, but for the bits that don't quite fit, the actual code and +# state must nonetheless live here. +# +# Rule 2: this file does not know about what role we're playing; it only knows +# about HTTP request/response cycles in the abstract. This ensures that we +# don't cheat and apply different rules to local and remote parties. +# +# +# Theory of operation +# =================== +# +# Possibly the simplest way to think about this is that we actually have 5 +# different state machines here. Yes, 5. These are: +# +# 1) The client state, with its complicated automaton (see the docs) +# 2) The server state, with its complicated automaton (see the docs) +# 3) The keep-alive state, with possible states {True, False} +# 4) The SWITCH_CONNECT state, with possible states {False, True} +# 5) The SWITCH_UPGRADE state, with possible states {False, True} +# +# For (3)-(5), the first state listed is the initial state. +# +# (1)-(3) are stored explicitly in member variables. The last +# two are stored implicitly in the pending_switch_proposals set as: +# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) +# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) +# +# And each of these machines has two different kinds of transitions: +# +# a) Event-triggered +# b) State-triggered +# +# Event triggered is the obvious thing that you'd think it is: some event +# happens, and if it's the right event at the right time then a transition +# happens. But there are somewhat complicated rules for which machines can +# "see" which events. (As a rule of thumb, if a machine "sees" an event, this +# means two things: the event can affect the machine, and if the machine is +# not in a state where it expects that event then it's an error.) These rules +# are: +# +# 1) The client machine sees all h11.events objects emitted by the client. +# +# 2) The server machine sees all h11.events objects emitted by the server. +# +# It also sees the client's Request event. +# +# And sometimes, server events are annotated with a _SWITCH_* event. For +# example, we can have a (Response, _SWITCH_CONNECT) event, which is +# different from a regular Response event. 
+# +# 3) The keep-alive machine sees the process_keep_alive_disabled() event +# (which is derived from Request/Response events), and this event +# transitions it from True -> False, or from False -> False. There's no way +# to transition back. +# +# 4&5) The _SWITCH_* machines transition from False->True when we get a +# Request that proposes the relevant type of switch (via +# process_client_switch_proposals), and they go from True->False when we +# get a Response that has no _SWITCH_* annotation. +# +# So that's event-triggered transitions. +# +# State-triggered transitions are less standard. What they do here is couple +# the machines together. The way this works is, when certain *joint* +# configurations of states are achieved, then we automatically transition to a +# new *joint* state. So, for example, if we're ever in a joint state with +# +# client: DONE +# keep-alive: False +# +# then the client state immediately transitions to: +# +# client: MUST_CLOSE +# +# This is fundamentally different from an event-based transition, because it +# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state +# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive +# transitioned True -> False. Either way, once this precondition is satisfied, +# this transition is immediately triggered. +# +# What if two conflicting state-based transitions get enabled at the same +# time? In practice there's only one case where this arises (client DONE -> +# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by +# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. +# +# Implementation +# -------------- +# +# The event-triggered transitions for the server and client machines are all +# stored explicitly in a table. Ditto for the state-triggered transitions that +# involve just the server and client state. +# +# The transitions for the other machines, and the state-triggered transitions +# that involve the other machines, are written out as explicit Python code. +# +# It'd be nice if there were some cleaner way to do all this. This isn't +# *too* terrible, but I feel like it could probably be better. +# +# WARNING +# ------- +# +# The script that generates the state machine diagrams for the docs knows how +# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS +# tables. But it can't automatically read the transitions that are written +# directly in Python code. So if you touch those, you need to also update the +# script to keep it in sync! +from typing import cast, Dict, Optional, Set, Tuple, Type, Union + +from ._events import * +from ._util import LocalProtocolError, Sentinel + +# Everything in __all__ gets re-exported as part of the h11 public API. 
+__all__ = [ + "CLIENT", + "SERVER", + "IDLE", + "SEND_RESPONSE", + "SEND_BODY", + "DONE", + "MUST_CLOSE", + "CLOSED", + "MIGHT_SWITCH_PROTOCOL", + "SWITCHED_PROTOCOL", + "ERROR", +] + + +class CLIENT(Sentinel, metaclass=Sentinel): + pass + + +class SERVER(Sentinel, metaclass=Sentinel): + pass + + +# States +class IDLE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_RESPONSE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_BODY(Sentinel, metaclass=Sentinel): + pass + + +class DONE(Sentinel, metaclass=Sentinel): + pass + + +class MUST_CLOSE(Sentinel, metaclass=Sentinel): + pass + + +class CLOSED(Sentinel, metaclass=Sentinel): + pass + + +class ERROR(Sentinel, metaclass=Sentinel): + pass + + +# Switch types +class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): + pass + + +EventTransitionType = Dict[ + Type[Sentinel], + Dict[ + Type[Sentinel], + Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]], + ], +] + +EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { + CLIENT: { + IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + MIGHT_SWITCH_PROTOCOL: {}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, + SERVER: { + IDLE: { + ConnectionClosed: CLOSED, + Response: SEND_BODY, + # Special case: server sees client Request events, in this form + (Request, CLIENT): SEND_RESPONSE, + }, + SEND_RESPONSE: { + InformationalResponse: SEND_RESPONSE, + Response: SEND_BODY, + (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, + (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, + }, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, +} + +# NB: there are also some special-case state-triggered transitions hard-coded +# into _fire_state_triggered_transitions below. +STATE_TRIGGERED_TRANSITIONS = { + # (Client state, Server state) -> new states + # Protocol negotiation + (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, + # Socket shutdown + (CLOSED, DONE): {SERVER: MUST_CLOSE}, + (CLOSED, IDLE): {SERVER: MUST_CLOSE}, + (ERROR, DONE): {SERVER: MUST_CLOSE}, + (DONE, CLOSED): {CLIENT: MUST_CLOSE}, + (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, + (DONE, ERROR): {CLIENT: MUST_CLOSE}, +} + + +class ConnectionState: + def __init__(self) -> None: + # Extra bits of state that don't quite fit into the state model. + + # If this is False then it enables the automatic DONE -> MUST_CLOSE + # transition. Don't set this directly; call .keep_alive_disabled() + self.keep_alive = True + + # This is a subset of {UPGRADE, CONNECT}, containing the proposals + # made by the client for switching protocols. 
+ self.pending_switch_proposals: Set[Type[Sentinel]] = set() + + self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE} + + def process_error(self, role: Type[Sentinel]) -> None: + self.states[role] = ERROR + self._fire_state_triggered_transitions() + + def process_keep_alive_disabled(self) -> None: + self.keep_alive = False + self._fire_state_triggered_transitions() + + def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None: + self.pending_switch_proposals.add(switch_event) + self._fire_state_triggered_transitions() + + def process_event( + self, + role: Type[Sentinel], + event_type: Type[Event], + server_switch_event: Optional[Type[Sentinel]] = None, + ) -> None: + _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type + if server_switch_event is not None: + assert role is SERVER + if server_switch_event not in self.pending_switch_proposals: + raise LocalProtocolError( + "Received server {} event without a pending proposal".format( + server_switch_event + ) + ) + _event_type = (event_type, server_switch_event) + if server_switch_event is None and _event_type is Response: + self.pending_switch_proposals = set() + self._fire_event_triggered_transitions(role, _event_type) + # Special case: the server state does get to see Request + # events. + if _event_type is Request: + assert role is CLIENT + self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) + self._fire_state_triggered_transitions() + + def _fire_event_triggered_transitions( + self, + role: Type[Sentinel], + event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], + ) -> None: + state = self.states[role] + try: + new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] + except KeyError: + event_type = cast(Type[Event], event_type) + raise LocalProtocolError( + "can't handle event type {} when role={} and state={}".format( + event_type.__name__, role, self.states[role] + ) + ) from None + self.states[role] = new_state + + def _fire_state_triggered_transitions(self) -> None: + # We apply these rules repeatedly until converging on a fixed point + while True: + start_states = dict(self.states) + + # It could happen that both these special-case transitions are + # enabled at the same time: + # + # DONE -> MIGHT_SWITCH_PROTOCOL + # DONE -> MUST_CLOSE + # + # For example, this will always be true of a HTTP/1.0 client + # requesting CONNECT. If this happens, the protocol switch takes + # priority. From there the client will either go to + # SWITCHED_PROTOCOL, in which case it's none of our business when + # they close the connection, or else the server will deny the + # request, in which case the client will go back to DONE and then + # from there to MUST_CLOSE. + if self.pending_switch_proposals: + if self.states[CLIENT] is DONE: + self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL + + if not self.pending_switch_proposals: + if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: + self.states[CLIENT] = DONE + + if not self.keep_alive: + for role in (CLIENT, SERVER): + if self.states[role] is DONE: + self.states[role] = MUST_CLOSE + + # Tabular state-triggered transitions + joint_state = (self.states[CLIENT], self.states[SERVER]) + changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) + self.states.update(changes) # type: ignore + + if self.states == start_states: + # Fixed point reached + return + + def start_next_cycle(self) -> None: + if self.states != {CLIENT: DONE, SERVER: DONE}: + raise LocalProtocolError( + "not in a reusable state. 
self.states={}".format(self.states) + ) + # Can't reach DONE/DONE with any of these active, but still, let's be + # sure. + assert self.keep_alive + assert not self.pending_switch_proposals + self.states = {CLIENT: IDLE, SERVER: IDLE} diff --git a/venv/lib/python3.10/site-packages/h11/_util.py b/venv/lib/python3.10/site-packages/h11/_util.py new file mode 100644 index 0000000..6718445 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/_util.py @@ -0,0 +1,135 @@ +from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union + +__all__ = [ + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "validate", + "bytesify", +] + + +class ProtocolError(Exception): + """Exception indicating a violation of the HTTP/1.1 protocol. + + This as an abstract base class, with two concrete base classes: + :exc:`LocalProtocolError`, which indicates that you tried to do something + that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which + indicates that the remote peer tried to do something that HTTP/1.1 says is + illegal. See :ref:`error-handling` for details. + + In addition to the normal :exc:`Exception` features, it has one attribute: + + .. attribute:: error_status_hint + + This gives a suggestion as to what status code a server might use if + this error occurred as part of a request. + + For a :exc:`RemoteProtocolError`, this is useful as a suggestion for + how you might want to respond to a misbehaving peer, if you're + implementing a server. + + For a :exc:`LocalProtocolError`, this can be taken as a suggestion for + how your peer might have responded to *you* if h11 had allowed you to + continue. + + The default is 400 Bad Request, a generic catch-all for protocol + violations. + + """ + + def __init__(self, msg: str, error_status_hint: int = 400) -> None: + if type(self) is ProtocolError: + raise TypeError("tried to directly instantiate ProtocolError") + Exception.__init__(self, msg) + self.error_status_hint = error_status_hint + + +# Strategy: there are a number of public APIs where a LocalProtocolError can +# be raised (send(), all the different event constructors, ...), and only one +# public API where RemoteProtocolError can be raised +# (receive_data()). Therefore we always raise LocalProtocolError internally, +# and then receive_data will translate this into a RemoteProtocolError. +# +# Internally: +# LocalProtocolError is the generic "ProtocolError". +# Externally: +# LocalProtocolError is for local errors and RemoteProtocolError is for +# remote errors. +class LocalProtocolError(ProtocolError): + def _reraise_as_remote_protocol_error(self) -> NoReturn: + # After catching a LocalProtocolError, use this method to re-raise it + # as a RemoteProtocolError. This method must be called from inside an + # except: block. + # + # An easy way to get an equivalent RemoteProtocolError is just to + # modify 'self' in place. + self.__class__ = RemoteProtocolError # type: ignore + # But the re-raising is somewhat non-trivial -- you might think that + # now that we've modified the in-flight exception object, that just + # doing 'raise' to re-raise it would be enough. But it turns out that + # this doesn't work, because Python tracks the exception type + # (exc_info[0]) separately from the exception object (exc_info[1]), + # and we only modified the latter. So we really do need to re-raise + # the new type explicitly. 
+ # On py3, the traceback is part of the exception object, so our + # in-place modification preserved it and we can just re-raise: + raise self + + +class RemoteProtocolError(ProtocolError): + pass + + +def validate( + regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any +) -> Dict[str, bytes]: + match = regex.fullmatch(data) + if not match: + if format_args: + msg = msg.format(*format_args) + raise LocalProtocolError(msg) + return match.groupdict() + + +# Sentinel values +# +# - Inherit identity-based comparison and hashing from object +# - Have a nice repr +# - Have a *bonus property*: type(sentinel) is sentinel +# +# The bonus property is useful if you want to take the return value from +# next_event() and do some sort of dispatch based on type(event). + +_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel") + + +class Sentinel(type): + def __new__( + cls: Type[_T_Sentinel], + name: str, + bases: Tuple[type, ...], + namespace: Dict[str, Any], + **kwds: Any + ) -> _T_Sentinel: + assert bases == (Sentinel,) + v = super().__new__(cls, name, bases, namespace, **kwds) + v.__class__ = v # type: ignore + return v + + def __repr__(self) -> str: + return self.__name__ + + +# Used for methods, request targets, HTTP versions, header names, and header +# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always +# returns bytes. +def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes: + # Fast-path: + if type(s) is bytes: + return s + if isinstance(s, str): + s = s.encode("ascii") + if isinstance(s, int): + raise TypeError("expected bytes-like object, not int") + return bytes(s) diff --git a/venv/lib/python3.10/site-packages/h11/_version.py b/venv/lib/python3.10/site-packages/h11/_version.py new file mode 100644 index 0000000..75d4288 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/_version.py @@ -0,0 +1,16 @@ +# This file must be kept very simple, because it is consumed from several +# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc. + +# We use a simple scheme: +# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev +# where the +dev versions are never released into the wild, they're just what +# we stick into the VCS in between releases. +# +# This is compatible with PEP 440: +# http://legacy.python.org/dev/peps/pep-0440/ +# via the use of the "local suffix" "+dev", which is disallowed on index +# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we +# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before* +# 1.0.0.) + +__version__ = "0.13.0" diff --git a/venv/lib/python3.10/site-packages/h11/_writers.py b/venv/lib/python3.10/site-packages/h11/_writers.py new file mode 100644 index 0000000..90a8dc0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/_writers.py @@ -0,0 +1,145 @@ +# Code to read HTTP data +# +# Strategy: each writer takes an event + a write-some-bytes function, which is +# calls. +# +# WRITERS is a dict describing how to pick a reader. 
It maps states to either: +# - a writer +# - or, for body writers, a dict of framin-dependent writer factories + +from typing import Any, Callable, Dict, List, Tuple, Type, Union + +from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response +from ._headers import Headers +from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER +from ._util import LocalProtocolError, Sentinel + +__all__ = ["WRITERS"] + +Writer = Callable[[bytes], Any] + + +def write_headers(headers: Headers, write: Writer) -> None: + # "Since the Host field-value is critical information for handling a + # request, a user agent SHOULD generate Host as the first header field + # following the request-line." - RFC 7230 + raw_items = headers._full_items + for raw_name, name, value in raw_items: + if name == b"host": + write(b"%s: %s\r\n" % (raw_name, value)) + for raw_name, name, value in raw_items: + if name != b"host": + write(b"%s: %s\r\n" % (raw_name, value)) + write(b"\r\n") + + +def write_request(request: Request, write: Writer) -> None: + if request.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target)) + write_headers(request.headers, write) + + +# Shared between InformationalResponse and Response +def write_any_response( + response: Union[InformationalResponse, Response], write: Writer +) -> None: + if response.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + status_bytes = str(response.status_code).encode("ascii") + # We don't bother sending ascii status messages like "OK"; they're + # optional and ignored by the protocol. (But the space after the numeric + # status code is mandatory.) + # + # XX FIXME: could at least make an effort to pull out the status message + # from stdlib's http.HTTPStatus table. Or maybe just steal their enums + # (either by import or copy/paste). We already accept them as status codes + # since they're of type IntEnum < int. + write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason)) + write_headers(response.headers, write) + + +class BodyWriter: + def __call__(self, event: Event, write: Writer) -> None: + if type(event) is Data: + self.send_data(event.data, write) + elif type(event) is EndOfMessage: + self.send_eom(event.headers, write) + else: # pragma: no cover + assert False + + def send_data(self, data: bytes, write: Writer) -> None: + pass + + def send_eom(self, headers: Headers, write: Writer) -> None: + pass + + +# +# These are all careful not to do anything to 'data' except call len(data) and +# write(data). This allows us to transparently pass-through funny objects, +# like placeholder objects referring to files on disk that will be sent via +# sendfile(2). +# +class ContentLengthWriter(BodyWriter): + def __init__(self, length: int) -> None: + self._length = length + + def send_data(self, data: bytes, write: Writer) -> None: + self._length -= len(data) + if self._length < 0: + raise LocalProtocolError("Too much data for declared Content-Length") + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if self._length != 0: + raise LocalProtocolError("Too little data for declared Content-Length") + if headers: + raise LocalProtocolError("Content-Length and trailers don't mix") + + +class ChunkedWriter(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + # if we encoded 0-length data in the naive way, it would look like an + # end-of-message. 
+ if not data: + return + write(b"%x\r\n" % len(data)) + write(data) + write(b"\r\n") + + def send_eom(self, headers: Headers, write: Writer) -> None: + write(b"0\r\n") + write_headers(headers, write) + + +class Http10Writer(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if headers: + raise LocalProtocolError("can't send trailers to HTTP/1.0 client") + # no need to close the socket ourselves, that will be taken care of by + # Connection: close machinery + + +WritersType = Dict[ + Union[Tuple[Sentinel, Sentinel], Sentinel], + Union[ + Dict[str, Type[BodyWriter]], + Callable[[Union[InformationalResponse, Response], Writer], None], + Callable[[Request, Writer], None], + ], +] + +WRITERS: WritersType = { + (CLIENT, IDLE): write_request, + (SERVER, IDLE): write_any_response, + (SERVER, SEND_RESPONSE): write_any_response, + SEND_BODY: { + "chunked": ChunkedWriter, + "content-length": ContentLengthWriter, + "http/1.0": Http10Writer, + }, +} diff --git a/venv/lib/python3.10/site-packages/h11/py.typed b/venv/lib/python3.10/site-packages/h11/py.typed new file mode 100644 index 0000000..f5642f7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/py.typed @@ -0,0 +1 @@ +Marker diff --git a/venv/lib/python3.10/site-packages/h11/tests/__init__.py b/venv/lib/python3.10/site-packages/h11/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/h11/tests/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..079f243 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/tests/__pycache__/helpers.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/helpers.cpython-310.pyc new file mode 100644 index 0000000..717b9e5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/helpers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-310.pyc new file mode 100644 index 0000000..b2d83c7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_connection.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_connection.cpython-310.pyc new file mode 100644 index 0000000..d9a41a3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_connection.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_events.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_events.cpython-310.pyc new file mode 100644 index 0000000..a25bb32 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_events.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_headers.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_headers.cpython-310.pyc new file mode 100644 index 0000000..e88cf72 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_headers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_helpers.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_helpers.cpython-310.pyc new file mode 100644 index 0000000..e9ee10f Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_helpers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_io.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_io.cpython-310.pyc new file mode 100644 index 0000000..3a3cbdb Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_io.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-310.pyc new file mode 100644 index 0000000..124a558 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_state.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_state.cpython-310.pyc new file mode 100644 index 0000000..966681e Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_state.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_util.cpython-310.pyc b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_util.cpython-310.pyc new file mode 100644 index 0000000..4d8e409 Binary files /dev/null and b/venv/lib/python3.10/site-packages/h11/tests/__pycache__/test_util.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/h11/tests/data/test-file b/venv/lib/python3.10/site-packages/h11/tests/data/test-file new file mode 100644 index 0000000..d0be0a6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/tests/data/test-file @@ -0,0 +1 @@ +92b12bc045050b55b848d37167a1a63947c364579889ce1d39788e45e9fac9e5 diff --git a/venv/lib/python3.10/site-packages/h11/tests/helpers.py b/venv/lib/python3.10/site-packages/h11/tests/helpers.py new file mode 100644 index 0000000..571be44 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/tests/helpers.py @@ -0,0 +1,101 @@ +from typing import cast, List, Type, Union, ValuesView + +from .._connection import Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import CLIENT, CLOSED, DONE, MUST_CLOSE, SERVER +from .._util import Sentinel + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + + +def get_all_events(conn: Connection) -> List[Event]: + got_events = [] + while True: + event = conn.next_event() + if event in (NEED_DATA, PAUSED): + break + event = cast(Event, event) + got_events.append(event) + if type(event) is ConnectionClosed: + break + return got_events + + +def receive_and_get(conn: Connection, data: bytes) -> List[Event]: + conn.receive_data(data) + return get_all_events(conn) + + +# Merges adjacent Data events, converts payloads to bytestrings, and removes +# chunk boundaries. 
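+# For example (mirroring test_normalize_data_events):
+#     [Data(data=b"1"), Data(data=b"2"), EndOfMessage()]
+# normalizes to
+#     [Data(data=b"12"), EndOfMessage()]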
+def normalize_data_events(in_events: List[Event]) -> List[Event]:
+    out_events: List[Event] = []
+    for event in in_events:
+        if type(event) is Data:
+            event = Data(data=bytes(event.data), chunk_start=False, chunk_end=False)
+        if out_events and type(out_events[-1]) is type(event) is Data:
+            out_events[-1] = Data(
+                data=out_events[-1].data + event.data,
+                chunk_start=out_events[-1].chunk_start,
+                chunk_end=out_events[-1].chunk_end,
+            )
+        else:
+            out_events.append(event)
+    return out_events
+
+
+# Given that we want to write tests that push some events through a Connection
+# and check that its state updates appropriately... we might as well make a
+# habit of pushing them through two Connections with a fake network link in
+# between.
+class ConnectionPair:
+    def __init__(self) -> None:
+        self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)}
+        self.other = {CLIENT: SERVER, SERVER: CLIENT}
+
+    @property
+    def conns(self) -> ValuesView[Connection]:
+        return self.conn.values()
+
+    # expect="match" if expect=send_events; expect=[...] to say what is expected
+    def send(
+        self,
+        role: Type[Sentinel],
+        send_events: Union[List[Event], Event],
+        expect: Union[List[Event], Event, Literal["match"]] = "match",
+    ) -> bytes:
+        if not isinstance(send_events, list):
+            send_events = [send_events]
+        data = b""
+        closed = False
+        for send_event in send_events:
+            new_data = self.conn[role].send(send_event)
+            if new_data is None:
+                closed = True
+            else:
+                data += new_data
+        # send uses b"" to mean b"", and None to mean closed
+        # receive uses b"" to mean closed, and None to mean "try again"
+        # so we have to translate between the two conventions
+        if data:
+            self.conn[self.other[role]].receive_data(data)
+        if closed:
+            self.conn[self.other[role]].receive_data(b"")
+        got_events = get_all_events(self.conn[self.other[role]])
+        if expect == "match":
+            expect = send_events
+        if not isinstance(expect, list):
+            expect = [expect]
+        assert got_events == expect
+        return data
diff --git a/venv/lib/python3.10/site-packages/h11/tests/test_against_stdlib_http.py b/venv/lib/python3.10/site-packages/h11/tests/test_against_stdlib_http.py
new file mode 100644
index 0000000..d2ee131
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/h11/tests/test_against_stdlib_http.py
@@ -0,0 +1,115 @@
+import json
+import os.path
+import socket
+import socketserver
+import threading
+from contextlib import closing, contextmanager
+from http.server import SimpleHTTPRequestHandler
+from typing import Callable, Generator
+from urllib.request import urlopen
+
+import h11
+
+
+@contextmanager
+def socket_server(
+    handler: Callable[..., socketserver.BaseRequestHandler]
+) -> Generator[socketserver.TCPServer, None, None]:
+    httpd = socketserver.TCPServer(("127.0.0.1", 0), handler)
+    thread = threading.Thread(
+        target=httpd.serve_forever, kwargs={"poll_interval": 0.01}
+    )
+    thread.daemon = True
+    try:
+        thread.start()
+        yield httpd
+    finally:
+        httpd.shutdown()
+
+
+test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file")
+with open(test_file_path, "rb") as f:
+    test_file_data = f.read()
+
+
+class SingleMindedRequestHandler(SimpleHTTPRequestHandler):
+    def translate_path(self, path: str) -> str:
+        return test_file_path
+
+
+def test_h11_as_client() -> None:
+    with socket_server(SingleMindedRequestHandler) as httpd:
+        with closing(socket.create_connection(httpd.server_address)) as s:
+            c = h11.Connection(h11.CLIENT)
+
+            s.sendall(
+                c.send(  # type: ignore[arg-type]
+                    h11.Request(
+                        method="GET", target="/foo",
headers=[("Host", "localhost")] + ) + ) + ) + s.sendall(c.send(h11.EndOfMessage())) # type: ignore[arg-type] + + data = bytearray() + while True: + event = c.next_event() + print(event) + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Response: + assert event.status_code == 200 + if type(event) is h11.Data: + data += event.data + if type(event) is h11.EndOfMessage: + break + assert bytes(data) == test_file_data + + +class H11RequestHandler(socketserver.BaseRequestHandler): + def handle(self) -> None: + with closing(self.request) as s: + c = h11.Connection(h11.SERVER) + request = None + while True: + event = c.next_event() + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Request: + request = event + if type(event) is h11.EndOfMessage: + break + assert request is not None + info = json.dumps( + { + "method": request.method.decode("ascii"), + "target": request.target.decode("ascii"), + "headers": { + name.decode("ascii"): value.decode("ascii") + for (name, value) in request.headers + }, + } + ) + s.sendall(c.send(h11.Response(status_code=200, headers=[]))) # type: ignore[arg-type] + s.sendall(c.send(h11.Data(data=info.encode("ascii")))) + s.sendall(c.send(h11.EndOfMessage())) + + +def test_h11_as_server() -> None: + with socket_server(H11RequestHandler) as httpd: + host, port = httpd.server_address + url = "http://{}:{}/some-path".format(host, port) + with closing(urlopen(url)) as f: + assert f.getcode() == 200 + data = f.read() + info = json.loads(data.decode("ascii")) + print(info) + assert info["method"] == "GET" + assert info["target"] == "/some-path" + assert "urllib" in info["headers"]["user-agent"] diff --git a/venv/lib/python3.10/site-packages/h11/tests/test_connection.py b/venv/lib/python3.10/site-packages/h11/tests/test_connection.py new file mode 100644 index 0000000..73a27b9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/tests/test_connection.py @@ -0,0 +1,1122 @@ +from typing import Any, cast, Dict, List, Optional, Tuple, Type + +import pytest + +from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError, RemoteProtocolError, Sentinel +from .helpers import ConnectionPair, get_all_events, receive_and_get + + +def test__keep_alive() -> None: + assert _keep_alive( + Request(method="GET", target="/", headers=[("Host", "Example.com")]) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "close")], + ) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "a, b, cLOse, foo")], + ) + ) + assert not _keep_alive( + Request(method="GET", target="/", headers=[], http_version="1.0") # type: ignore[arg-type] + ) + + assert _keep_alive(Response(status_code=200, headers=[])) # type: ignore[arg-type] + assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")])) + assert not _keep_alive( + 
Response(status_code=200, headers=[("Connection", "a, b, cLOse, foo")]) + ) + assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) # type: ignore[arg-type] + + +def test__body_framing() -> None: + def headers(cl: Optional[int], te: bool) -> List[Tuple[str, str]]: + headers = [] + if cl is not None: + headers.append(("Content-Length", str(cl))) + if te: + headers.append(("Transfer-Encoding", "chunked")) + return headers + + def resp( + status_code: int = 200, cl: Optional[int] = None, te: bool = False + ) -> Response: + return Response(status_code=status_code, headers=headers(cl, te)) + + def req(cl: Optional[int] = None, te: bool = False) -> Request: + h = headers(cl, te) + h += [("Host", "example.com")] + return Request(method="GET", target="/", headers=h) + + # Special cases where the headers are ignored: + for kwargs in [{}, {"cl": 100}, {"te": True}, {"cl": 100, "te": True}]: + kwargs = cast(Dict[str, Any], kwargs) + for meth, r in [ + (b"HEAD", resp(**kwargs)), + (b"GET", resp(status_code=204, **kwargs)), + (b"GET", resp(status_code=304, **kwargs)), + ]: + assert _body_framing(meth, r) == ("content-length", (0,)) + + # Transfer-encoding + for kwargs in [{"te": True}, {"cl": 100, "te": True}]: + kwargs = cast(Dict[str, Any], kwargs) + for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: # type: ignore + assert _body_framing(meth, r) == ("chunked", ()) + + # Content-Length + for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: # type: ignore + assert _body_framing(meth, r) == ("content-length", (100,)) + + # No headers + assert _body_framing(None, req()) == ("content-length", (0,)) # type: ignore + assert _body_framing(b"GET", resp()) == ("http/1.0", ()) + + +def test_Connection_basics_and_content_length() -> None: + with pytest.raises(ValueError): + Connection("CLIENT") # type: ignore + + p = ConnectionPair() + assert p.conn[CLIENT].our_role is CLIENT + assert p.conn[CLIENT].their_role is SERVER + assert p.conn[SERVER].our_role is SERVER + assert p.conn[SERVER].their_role is CLIENT + + data = p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Content-Length", "10")], + ), + ) + assert data == ( + b"GET / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 10\r\n\r\n" + ) + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + assert p.conn[CLIENT].our_state is SEND_BODY + assert p.conn[CLIENT].their_state is SEND_RESPONSE + assert p.conn[SERVER].our_state is SEND_RESPONSE + assert p.conn[SERVER].their_state is SEND_BODY + + assert p.conn[CLIENT].their_http_version is None + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] + assert data == b"HTTP/1.1 100 \r\n\r\n" + + data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")])) + assert data == b"HTTP/1.1 200 \r\nContent-Length: 11\r\n\r\n" + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + assert p.conn[CLIENT].their_http_version == b"1.1" + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(CLIENT, Data(data=b"12345")) + assert data == b"12345" + data = p.send( + CLIENT, Data(data=b"67890"), expect=[Data(data=b"67890"), EndOfMessage()] + ) + assert data == b"67890" + data = p.send(CLIENT, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: 
SEND_BODY} + + data = p.send(SERVER, Data(data=b"1234567890")) + assert data == b"1234567890" + data = p.send(SERVER, Data(data=b"1"), expect=[Data(data=b"1"), EndOfMessage()]) + assert data == b"1" + data = p.send(SERVER, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunked() -> None: + p = ConnectionPair() + + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ), + ) + data = p.send(CLIENT, Data(data=b"1234567890", chunk_start=True, chunk_end=True)) + assert data == b"a\r\n1234567890\r\n" + data = p.send(CLIENT, Data(data=b"abcde", chunk_start=True, chunk_end=True)) + assert data == b"5\r\nabcde\r\n" + data = p.send(CLIENT, Data(data=b""), expect=[]) + assert data == b"" + data = p.send(CLIENT, EndOfMessage(headers=[("hello", "there")])) + assert data == b"0\r\nhello: there\r\n\r\n" + + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + p.send(SERVER, Data(data=b"54321", chunk_start=True, chunk_end=True)) + p.send(SERVER, Data(data=b"12345", chunk_start=True, chunk_end=True)) + p.send(SERVER, EndOfMessage()) + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunk_boundaries() -> None: + conn = Connection(our_role=SERVER) + + request = ( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + ) + conn.receive_data(request) + assert conn.next_event() == Request( + method="POST", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ) + assert conn.next_event() is NEED_DATA + + conn.receive_data(b"5\r\nhello\r\n") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"5\r\nhel") + assert conn.next_event() == Data(data=b"hel", chunk_start=True, chunk_end=False) + + conn.receive_data(b"l") + assert conn.next_event() == Data(data=b"l", chunk_start=False, chunk_end=False) + + conn.receive_data(b"o\r\n") + assert conn.next_event() == Data(data=b"o", chunk_start=False, chunk_end=True) + + conn.receive_data(b"5\r\nhello") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"\r\n") + assert conn.next_event() == NEED_DATA + + conn.receive_data(b"0\r\n\r\n") + assert conn.next_event() == EndOfMessage() + + +def test_client_talking_to_http10_server() -> None: + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "example.com")])) + c.send(EndOfMessage()) + assert c.our_state is DONE + # No content-length, so Http10 framing for body + assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [ + Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") # type: ignore[arg-type] + ] + assert c.our_state is MUST_CLOSE + assert receive_and_get(c, b"12345") == [Data(data=b"12345")] + assert receive_and_get(c, b"67890") == [Data(data=b"67890")] + assert receive_and_get(c, b"") == [EndOfMessage(), ConnectionClosed()] + assert c.their_state is CLOSED + + +def test_server_talking_to_http10_client() -> None: + c = Connection(SERVER) + # No content-length, so no body + # NB: no host header + assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] + EndOfMessage(), + ] + assert c.their_state is MUST_CLOSE + + # We automatically 
Connection: close back at them + assert ( + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + + assert c.send(Data(data=b"12345")) == b"12345" + assert c.send(EndOfMessage()) == b"" + assert c.our_state is MUST_CLOSE + + # Check that it works if they do send Content-Length + c = Connection(SERVER) + # NB: no host header + assert receive_and_get(c, b"POST / HTTP/1.0\r\nContent-Length: 10\r\n\r\n1") == [ + Request( + method="POST", + target="/", + headers=[("Content-Length", "10")], + http_version="1.0", + ), + Data(data=b"1"), + ] + assert receive_and_get(c, b"234567890") == [Data(data=b"234567890"), EndOfMessage()] + assert c.their_state is MUST_CLOSE + assert receive_and_get(c, b"") == [ConnectionClosed()] + + +def test_automatic_transfer_encoding_in_response() -> None: + # Check that in responses, the user can specify either Transfer-Encoding: + # chunked or no framing at all, and in both cases we automatically select + # the right option depending on whether the peer speaks HTTP/1.0 or + # HTTP/1.1 + for user_headers in [ + [("Transfer-Encoding", "chunked")], + [], + # In fact, this even works if Content-Length is set, + # because if both are set then Transfer-Encoding wins + [("Transfer-Encoding", "chunked"), ("Content-Length", "100")], + ]: + user_headers = cast(List[Tuple[str, str]], user_headers) + p = ConnectionPair() + p.send( + CLIENT, + [ + Request(method="GET", target="/", headers=[("Host", "example.com")]), + EndOfMessage(), + ], + ) + # When speaking to HTTP/1.1 client, all of the above cases get + # normalized to Transfer-Encoding: chunked + p.send( + SERVER, + Response(status_code=200, headers=user_headers), + expect=Response( + status_code=200, headers=[("Transfer-Encoding", "chunked")] + ), + ) + + # When speaking to HTTP/1.0 client, all of the above cases get + # normalized to no-framing-headers + c = Connection(SERVER) + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert ( + c.send(Response(status_code=200, headers=user_headers)) + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + assert c.send(Data(data=b"12345")) == b"12345" + + +def test_automagic_connection_close_handling() -> None: + p = ConnectionPair() + # If the user explicitly sets Connection: close, then we notice and + # respect it + p.send( + CLIENT, + [ + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Connection", "close")], + ), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states[CLIENT] is MUST_CLOSE + # And if the client sets it, the server automatically echoes it back + p.send( + SERVER, + # no header here... 
+        [Response(status_code=204, headers=[]), EndOfMessage()],  # type: ignore[arg-type]
+        # ...but oh look, it arrived anyway
+        expect=[
+            Response(status_code=204, headers=[("connection", "close")]),
+            EndOfMessage(),
+        ],
+    )
+    for conn in p.conns:
+        assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE}
+
+
+def test_100_continue() -> None:
+    def setup() -> ConnectionPair:
+        p = ConnectionPair()
+        p.send(
+            CLIENT,
+            Request(
+                method="GET",
+                target="/",
+                headers=[
+                    ("Host", "example.com"),
+                    ("Content-Length", "100"),
+                    ("Expect", "100-continue"),
+                ],
+            ),
+        )
+        for conn in p.conns:
+            assert conn.client_is_waiting_for_100_continue
+        assert not p.conn[CLIENT].they_are_waiting_for_100_continue
+        assert p.conn[SERVER].they_are_waiting_for_100_continue
+        return p
+
+    # Disabled by 100 Continue
+    p = setup()
+    p.send(SERVER, InformationalResponse(status_code=100, headers=[]))  # type: ignore[arg-type]
+    for conn in p.conns:
+        assert not conn.client_is_waiting_for_100_continue
+        assert not conn.they_are_waiting_for_100_continue
+
+    # Disabled by a real response
+    p = setup()
+    p.send(
+        SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")])
+    )
+    for conn in p.conns:
+        assert not conn.client_is_waiting_for_100_continue
+        assert not conn.they_are_waiting_for_100_continue
+
+    # Disabled by the client going ahead and sending stuff anyway
+    p = setup()
+    p.send(CLIENT, Data(data=b"12345"))
+    for conn in p.conns:
+        assert not conn.client_is_waiting_for_100_continue
+        assert not conn.they_are_waiting_for_100_continue
+
+
+def test_max_incomplete_event_size_countermeasure() -> None:
+    # Infinitely long headers are definitely not okay
+    c = Connection(SERVER)
+    c.receive_data(b"GET / HTTP/1.0\r\nEndless: ")
+    assert c.next_event() is NEED_DATA
+    with pytest.raises(RemoteProtocolError):
+        while True:
+            c.receive_data(b"a" * 1024)
+            c.next_event()
+
+    # Checking that the same header is accepted / rejected depending on the
+    # max_incomplete_event_size setting:
+    c = Connection(SERVER, max_incomplete_event_size=5000)
+    c.receive_data(b"GET / HTTP/1.0\r\nBig: ")
+    c.receive_data(b"a" * 4000)
+    c.receive_data(b"\r\n\r\n")
+    assert get_all_events(c) == [
+        Request(
+            method="GET", target="/", http_version="1.0", headers=[("big", "a" * 4000)]
+        ),
+        EndOfMessage(),
+    ]
+
+    c = Connection(SERVER, max_incomplete_event_size=4000)
+    c.receive_data(b"GET / HTTP/1.0\r\nBig: ")
+    c.receive_data(b"a" * 4000)
+    with pytest.raises(RemoteProtocolError):
+        c.next_event()
+
+    # Temporarily exceeding the size limit is fine, as long as it's done with
+    # complete events:
+    c = Connection(SERVER, max_incomplete_event_size=5000)
+    c.receive_data(b"GET / HTTP/1.0\r\nContent-Length: 10000")
+    c.receive_data(b"\r\n\r\n" + b"a" * 10000)
+    assert get_all_events(c) == [
+        Request(
+            method="GET",
+            target="/",
+            http_version="1.0",
+            headers=[("Content-Length", "10000")],
+        ),
+        Data(data=b"a" * 10000),
+        EndOfMessage(),
+    ]
+
+    c = Connection(SERVER, max_incomplete_event_size=100)
+    # Two pipelined requests to create a way-too-big receive buffer...
but + # it's fine because we're not checking + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a\r\n\r\n" + b"GET /2 HTTP/1.1\r\nHost: b\r\n\r\n" + b"X" * 1000 + ) + assert get_all_events(c) == [ + Request(method="GET", target="/1", headers=[("host", "a")]), + EndOfMessage(), + ] + # Even more data comes in, still no problem + c.receive_data(b"X" * 1000) + # We can respond and reuse to get the second pipelined request + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + assert get_all_events(c) == [ + Request(method="GET", target="/2", headers=[("host", "b")]), + EndOfMessage(), + ] + # But once we unpause and try to read the next message, and find that it's + # incomplete and the buffer is *still* way too large, then *that's* a + # problem: + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_reuse_simple() -> None: + p = ConnectionPair() + p.send( + CLIENT, + [Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()], + ) + p.send( + SERVER, + [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + conn.start_next_cycle() + + p.send( + CLIENT, + [ + Request(method="DELETE", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ], + ) + p.send( + SERVER, + [ + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) + + +def test_pipelining() -> None: + # Client doesn't support pipelining, so we have to do this by hand + c = Connection(SERVER) + assert c.next_event() is NEED_DATA + # 3 requests all bunched up + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + b"GET /3 HTTP/1.1\r\nHost: a.com\r\n\r\n" + ) + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.their_state is DONE + assert c.our_state is SEND_RESPONSE + + assert c.next_event() is PAUSED + + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.their_state is DONE + assert c.our_state is DONE + + c.start_next_cycle() + + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ] + assert c.next_event() is PAUSED + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + + assert get_all_events(c) == [ + Request(method="GET", target="/3", headers=[("Host", "a.com")]), + EndOfMessage(), + ] + # Doesn't pause this time, no trailing data + assert c.next_event() is NEED_DATA + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + + # Arrival of more data triggers pause + assert c.next_event() is NEED_DATA + c.receive_data(b"SADF") + assert c.next_event() is PAUSED + assert c.trailing_data == (b"SADF", False) + # If EOF arrives while paused, we don't see that either: + c.receive_data(b"") + assert c.trailing_data == (b"SADF", True) + assert c.next_event() is PAUSED + c.receive_data(b"") + assert c.next_event() 
is PAUSED + # Can't call receive_data with non-empty buf after closing it + with pytest.raises(RuntimeError): + c.receive_data(b"FDSA") + + +def test_protocol_switch() -> None: + for (req, deny, accept) in [ + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + ), + ( + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + InformationalResponse(status_code=101, headers=[("Upgrade", "a")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + # Accept CONNECT, not upgrade + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + # Accept Upgrade, not CONNECT + InformationalResponse(status_code=101, headers=[("Upgrade", "b")]), + ), + ]: + + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send(CLIENT, req) + # No switch-related state change stuff yet; the client has to + # finish the request before that kicks in + for conn in p.conns: + assert conn.states[CLIENT] is SEND_BODY + p.send(CLIENT, [Data(data=b"1"), EndOfMessage()]) + for conn in p.conns: + assert conn.states[CLIENT] is MIGHT_SWITCH_PROTOCOL + assert p.conn[SERVER].next_event() is PAUSED + return p + + # Test deny case + p = setup() + p.send(SERVER, deny) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + p.send(SERVER, EndOfMessage()) + # Check that re-use is still allowed after a denial + for conn in p.conns: + conn.start_next_cycle() + + # Test accept case + p = setup() + p.send(SERVER, accept) + for conn in p.conns: + assert conn.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + conn.receive_data(b"123") + assert conn.next_event() is PAUSED + conn.receive_data(b"456") + assert conn.next_event() is PAUSED + assert conn.trailing_data == (b"123456", False) + + # Pausing in might-switch, then recovery + # (weird artificial case where the trailing data actually is valid + # HTTP for some reason, because this makes it easier to test the state + # logic) + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"GET / HTTP/1.0\r\n\r\n") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"GET / HTTP/1.0\r\n\r\n", False) + sc.send(deny) + assert sc.next_event() is PAUSED + sc.send(EndOfMessage()) + sc.start_next_cycle() + assert get_all_events(sc) == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] + EndOfMessage(), + ] + + # When we're DONE, have no trailing data, and the connection gets + # closed, we report ConnectionClosed(). When we're in might-switch or + # switched, we don't. 
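+        # (Both cases are exercised below: after an accepted switch the
+        # connection stays PAUSED even once EOF arrives, while after a denial
+        # the close surfaces as an ordinary ConnectionClosed() event.)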
+ p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"", True) + p.send(SERVER, accept) + assert sc.next_event() is PAUSED + + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + sc.send(deny) + assert sc.next_event() == ConnectionClosed() + + # You can't send after switching protocols, or while waiting for a + # protocol switch + p = setup() + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send( + Request(method="GET", target="/", headers=[("Host", "a")]) + ) + p = setup() + p.send(SERVER, accept) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(Data(data=b"123")) + + +def test_close_simple() -> None: + # Just immediately closing a new connection without anything having + # happened yet. + for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]: + + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send(who_shot_first, ConnectionClosed()) + for conn in p.conns: + assert conn.states == { + who_shot_first: CLOSED, + who_shot_second: MUST_CLOSE, + } + return p + + # You can keep putting b"" into a closed connection, and you keep + # getting ConnectionClosed() out: + p = setup() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + p.conn[who_shot_second].receive_data(b"") + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + # Second party can close... + p = setup() + p.send(who_shot_second, ConnectionClosed()) + for conn in p.conns: + assert conn.our_state is CLOSED + assert conn.their_state is CLOSED + # But trying to receive new data on a closed connection is a + # RuntimeError (not ProtocolError, because the problem here isn't + # violation of HTTP, it's violation of physics) + p = setup() + with pytest.raises(RuntimeError): + p.conn[who_shot_second].receive_data(b"123") + # And receiving new data on a MUST_CLOSE connection is a ProtocolError + p = setup() + p.conn[who_shot_first].receive_data(b"GET") + with pytest.raises(RemoteProtocolError): + p.conn[who_shot_first].next_event() + + +def test_close_different_states() -> None: + req = [ + Request(method="GET", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ] + resp = [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ] + + # Client before request + p = ConnectionPair() + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + + # Client after request + p = ConnectionPair() + p.send(CLIENT, req) + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + + # Server after request -> not allowed + p = ConnectionPair() + p.send(CLIENT, req) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(ConnectionClosed()) + p.conn[CLIENT].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[CLIENT].next_event() + + # Server after response + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(SERVER, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + # Both after closing (ConnectionClosed() is idempotent) + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + p.send(CLIENT, ConnectionClosed()) + 
p.send(SERVER, ConnectionClosed()) + + # In the middle of sending -> not allowed + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", target="/", headers=[("Host", "a"), ("Content-Length", "10")] + ), + ) + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send(ConnectionClosed()) + p.conn[SERVER].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[SERVER].next_event() + + +# Receive several requests and then client shuts down their side of the +# connection; we can respond to each +def test_pipelined_close() -> None: + c = Connection(SERVER) + # 2 requests then a close + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + ) + c.receive_data(b"") + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.states[CLIENT] is DONE + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.states[SERVER] is DONE + c.start_next_cycle() + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ConnectionClosed(), + ] + assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + c.send(ConnectionClosed()) + assert c.states == {CLIENT: CLOSED, SERVER: CLOSED} + + +def test_sendfile() -> None: + class SendfilePlaceholder: + def __len__(self) -> int: + return 10 + + placeholder = SendfilePlaceholder() + + def setup( + header: Tuple[str, str], http_version: str + ) -> Tuple[Connection, Optional[List[bytes]]]: + c = Connection(SERVER) + receive_and_get( + c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii") + ) + headers = [] + if header: + headers.append(header) + c.send(Response(status_code=200, headers=headers)) + return c, c.send_with_data_passthrough(Data(data=placeholder)) # type: ignore + + c, data = setup(("Content-Length", "10"), "1.1") + assert data == [placeholder] # type: ignore + # Raises an error if the connection object doesn't think we've sent + # exactly 10 bytes + c.send(EndOfMessage()) + + _, data = setup(("Transfer-Encoding", "chunked"), "1.1") + assert placeholder in data # type: ignore + data[data.index(placeholder)] = b"x" * 10 # type: ignore + assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" # type: ignore + + c, data = setup(None, "1.0") # type: ignore + assert data == [placeholder] # type: ignore + assert c.our_state is SEND_BODY + + +def test_errors() -> None: + # After a receive error, you can't receive + for role in [CLIENT, SERVER]: + c = Connection(our_role=role) + c.receive_data(b"gibberish\r\n\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + # Now any attempt to receive continues to raise + assert c.their_state is ERROR + assert c.our_state is not ERROR + print(c._cstate.states) + with pytest.raises(RemoteProtocolError): + c.next_event() + # But we can still yell at the client for sending us gibberish + if role is SERVER: + assert ( + c.send(Response(status_code=400, headers=[])) # type: ignore[arg-type] + == b"HTTP/1.1 400 \r\nConnection: close\r\n\r\n" + ) + + # After an error sending, you can no longer send + # (This is 
especially important for things like content-length errors, + # where there's complex internal state being modified) + def conn(role: Type[Sentinel]) -> Connection: + c = Connection(our_role=role) + if role is SERVER: + # Put it into the state where it *could* send a response... + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert c.our_state is SEND_RESPONSE + return c + + for role in [CLIENT, SERVER]: + if role is CLIENT: + # This HTTP/1.0 request won't be detected as bad until after we go + # through the state machine and hit the writing code + good = Request(method="GET", target="/", headers=[("Host", "example.com")]) + bad = Request( + method="GET", + target="/", + headers=[("Host", "example.com")], + http_version="1.0", + ) + elif role is SERVER: + good = Response(status_code=200, headers=[]) # type: ignore[arg-type,assignment] + bad = Response(status_code=200, headers=[], http_version="1.0") # type: ignore[arg-type,assignment] + # Make sure 'good' actually is good + c = conn(role) + c.send(good) + assert c.our_state is not ERROR + # Do that again, but this time sending 'bad' first + c = conn(role) + with pytest.raises(LocalProtocolError): + c.send(bad) + assert c.our_state is ERROR + assert c.their_state is not ERROR + # Now 'good' is not so good + with pytest.raises(LocalProtocolError): + c.send(good) + + # And check send_failed() too + c = conn(role) + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + # This is idempotent + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + + +def test_idle_receive_nothing() -> None: + # At one point this incorrectly raised an error + for role in [CLIENT, SERVER]: + c = Connection(role) + assert c.next_event() is NEED_DATA + + +def test_connection_drop() -> None: + c = Connection(SERVER) + c.receive_data(b"GET /") + assert c.next_event() is NEED_DATA + c.receive_data(b"") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_408_request_timeout() -> None: + # Should be able to send this spontaneously as a server without seeing + # anything from client + p = ConnectionPair() + p.send(SERVER, Response(status_code=408, headers=[(b"connection", b"close")])) + + +# This used to raise IndexError +def test_empty_request() -> None: + c = Connection(SERVER) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to raise IndexError +def test_empty_response() -> None: + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "a")])) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x01\x00\xa5", # Typical start of a TLS Client Hello + ], +) +def test_early_detection_of_invalid_request(data: bytes) -> None: + c = Connection(SERVER) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x03\x00\x31", # Typical start of a TLS Server Hello + ], +) +def test_early_detection_of_invalid_response(data: bytes) -> None: + c = Connection(CLIENT) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to give different headers for HEAD and GET. 
+# The correct way to handle HEAD is to put whatever headers we *would* have +# put if it were a GET -- even though we know that for HEAD, those headers +# will be ignored. +def test_HEAD_framing_headers() -> None: + def setup(method: bytes, http_version: bytes) -> Connection: + c = Connection(SERVER) + c.receive_data( + method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert type(c.next_event()) is EndOfMessage + return c + + for method in [b"GET", b"HEAD"]: + # No Content-Length, HTTP/1.1 peer, should use chunked + c = setup(method, b"1.1") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + # No Content-Length, HTTP/1.0 peer, frame with connection: close + c = setup(method, b"1.0") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] + b"Connection: close\r\n\r\n" + ) + + # Content-Length + Transfer-Encoding, TE wins + c = setup(method, b"1.1") + assert ( + c.send( + Response( + status_code=200, + headers=[ + ("Content-Length", "100"), + ("Transfer-Encoding", "chunked"), + ], + ) + ) + == b"HTTP/1.1 200 \r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + +def test_special_exceptions_for_lost_connection_in_message_body() -> None: + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"12345") + assert c.next_event() == Data(data=b"12345") + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "received 5 bytes" in str(excinfo.value) + assert "expected 100" in str(excinfo.value) + + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"8\r\n012345") + assert c.next_event().data == b"012345" # type: ignore + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "incomplete chunked read" in str(excinfo.value) diff --git a/venv/lib/python3.10/site-packages/h11/tests/test_events.py b/venv/lib/python3.10/site-packages/h11/tests/test_events.py new file mode 100644 index 0000000..bc6c313 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/tests/test_events.py @@ -0,0 +1,150 @@ +from http import HTTPStatus + +import pytest + +from .. 
import _events +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._util import LocalProtocolError + + +def test_events() -> None: + with pytest.raises(LocalProtocolError): + # Missing Host: + req = Request( + method="GET", target="/", headers=[("a", "b")], http_version="1.1" + ) + # But this is okay (HTTP/1.0) + req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0") + # fields are normalized + assert req.method == b"GET" + assert req.target == b"/" + assert req.headers == [(b"a", b"b")] + assert req.http_version == b"1.0" + + # This is also okay -- has a Host (with weird capitalization, which is ok) + req = Request( + method="GET", + target="/", + headers=[("a", "b"), ("hOSt", "example.com")], + http_version="1.1", + ) + # we normalize header capitalization + assert req.headers == [(b"a", b"b"), (b"host", b"example.com")] + + # Multiple host is bad too + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.1", + ) + # Even for HTTP/1.0 + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.0", + ) + + # Header values are validated + for bad_char in "\x00\r\n\f\v": + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd" + bad_char)], + http_version="1.0", + ) + + # But for compatibility we allow non-whitespace control characters, even + # though they're forbidden by the spec. + Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")], + http_version="1.0", + ) + + # Request target is validated + for bad_byte in b"\x00\x20\x7f\xee": + target = bytearray(b"/") + target.append(bad_byte) + with pytest.raises(LocalProtocolError): + Request( + method="GET", target=target, headers=[("Host", "a")], http_version="1.1" + ) + + # Request method is validated + with pytest.raises(LocalProtocolError): + Request( + method="GET / HTTP/1.1", + target=target, + headers=[("Host", "a")], + http_version="1.1", + ) + + ir = InformationalResponse(status_code=100, headers=[("Host", "a")]) + assert ir.status_code == 100 + assert ir.headers == [(b"host", b"a")] + assert ir.http_version == b"1.1" + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=200, headers=[("Host", "a")]) + + resp = Response(status_code=204, headers=[], http_version="1.0") # type: ignore[arg-type] + assert resp.status_code == 204 + assert resp.headers == [] + assert resp.http_version == b"1.0" + + with pytest.raises(LocalProtocolError): + resp = Response(status_code=100, headers=[], http_version="1.0") # type: ignore[arg-type] + + with pytest.raises(LocalProtocolError): + Response(status_code="100", headers=[], http_version="1.0") # type: ignore[arg-type] + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=b"100", headers=[], http_version="1.0") # type: ignore[arg-type] + + d = Data(data=b"asdf") + assert d.data == b"asdf" + + eom = EndOfMessage() + assert eom.headers == [] + + cc = ConnectionClosed() + assert repr(cc) == "ConnectionClosed()" + + +def test_intenum_status_code() -> None: + # https://github.com/python-hyper/h11/issues/72 + + r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") # type: ignore[arg-type] + assert r.status_code == HTTPStatus.OK + assert 
type(r.status_code) is not type(HTTPStatus.OK) + assert type(r.status_code) is int + + +def test_header_casing() -> None: + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert len(r.headers) == 2 + assert r.headers[0] == (b"host", b"example.org") + assert r.headers == [(b"host", b"example.org"), (b"connection", b"keep-alive")] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive"), + ] diff --git a/venv/lib/python3.10/site-packages/h11/tests/test_headers.py b/venv/lib/python3.10/site-packages/h11/tests/test_headers.py new file mode 100644 index 0000000..ba53d08 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/tests/test_headers.py @@ -0,0 +1,157 @@ +import pytest + +from .._events import Request +from .._headers import ( + get_comma_header, + has_expect_100_continue, + Headers, + normalize_and_validate, + set_comma_header, +) +from .._util import LocalProtocolError + + +def test_normalize_and_validate() -> None: + assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")] + assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")] + + # no leading/trailing whitespace in names + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo ", "bar")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b" foo", "bar")]) + + # no weird characters in names + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([(b"foo bar", b"baz")]) + assert "foo bar" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x00bar", b"baz")]) + # Not even 8-bit characters: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\xffbar", b"baz")]) + # And not even the control characters we allow in values: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x01bar", b"baz")]) + + # no return or NUL characters in values + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("foo", "bar\rbaz")]) + assert "bar\\rbaz" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\nbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\x00baz")]) + # no leading/trailing whitespace + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz ")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", " barbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz\t")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "\tbarbaz")]) + + # content-length + assert normalize_and_validate([("Content-Length", "1")]) == [ + (b"content-length", b"1") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "asdf")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1x")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1"), ("Content-Length", "2")]) + assert normalize_and_validate( + [("Content-Length", "0"), ("Content-Length", "0")] + ) == [(b"content-length", b"0")] + assert normalize_and_validate([("Content-Length", "0 , 0")]) == [ + (b"content-length", b"0") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate( + [("Content-Length", "1"), ("Content-Length", "1"), ("Content-Length", "2")] + ) + 
with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1 , 1,2")]) + + # transfer-encoding + assert normalize_and_validate([("Transfer-Encoding", "chunked")]) == [ + (b"transfer-encoding", b"chunked") + ] + assert normalize_and_validate([("Transfer-Encoding", "cHuNkEd")]) == [ + (b"transfer-encoding", b"chunked") + ] + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("Transfer-Encoding", "gzip")]) + assert excinfo.value.error_status_hint == 501 # Not Implemented + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate( + [("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "gzip")] + ) + assert excinfo.value.error_status_hint == 501 # Not Implemented + + +def test_get_set_comma_header() -> None: + headers = normalize_and_validate( + [ + ("Connection", "close"), + ("whatever", "something"), + ("connectiON", "fOo,, , BAR"), + ] + ) + + assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"] + + headers = set_comma_header(headers, b"newthing", ["a", "b"]) # type: ignore + + with pytest.raises(LocalProtocolError): + set_comma_header(headers, b"newthing", [" a", "b"]) # type: ignore + + assert headers == [ + (b"connection", b"close"), + (b"whatever", b"something"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + ] + + headers = set_comma_header(headers, b"whatever", ["different thing"]) # type: ignore + + assert headers == [ + (b"connection", b"close"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + (b"whatever", b"different thing"), + ] + + +def test_has_100_continue() -> None: + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + ) + ) + assert not has_expect_100_continue( + Request(method="GET", target="/", headers=[("Host", "example.com")]) + ) + # Case insensitive + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-Continue")], + ) + ) + # Doesn't work in HTTP/1.0 + assert not has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + http_version="1.0", + ) + ) diff --git a/venv/lib/python3.10/site-packages/h11/tests/test_helpers.py b/venv/lib/python3.10/site-packages/h11/tests/test_helpers.py new file mode 100644 index 0000000..c329c76 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/tests/test_helpers.py @@ -0,0 +1,32 @@ +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .helpers import normalize_data_events + + +def test_normalize_data_events() -> None: + assert normalize_data_events( + [ + Data(data=bytearray(b"1")), + Data(data=b"2"), + Response(status_code=200, headers=[]), # type: ignore[arg-type] + Data(data=b"3"), + Data(data=b"4"), + EndOfMessage(), + Data(data=b"5"), + Data(data=b"6"), + Data(data=b"7"), + ] + ) == [ + Data(data=b"12"), + Response(status_code=200, headers=[]), # type: ignore[arg-type] + Data(data=b"34"), + EndOfMessage(), + Data(data=b"567"), + ] diff --git a/venv/lib/python3.10/site-packages/h11/tests/test_io.py b/venv/lib/python3.10/site-packages/h11/tests/test_io.py new file mode 100644 index 0000000..e9c01bd --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/tests/test_io.py @@ -0,0 +1,566 @@ +from typing import Any, Callable, Generator, List + +import 
pytest + +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._headers import Headers, normalize_and_validate +from .._readers import ( + _obsolete_line_fold, + ChunkedReader, + ContentLengthReader, + Http10Reader, + READERS, +) +from .._receivebuffer import ReceiveBuffer +from .._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError +from .._writers import ( + ChunkedWriter, + ContentLengthWriter, + Http10Writer, + write_any_response, + write_headers, + write_request, + WRITERS, +) +from .helpers import normalize_data_events + +SIMPLE_CASES = [ + ( + (CLIENT, IDLE), + Request( + method="GET", + target="/a", + headers=[("Host", "foo"), ("Connection", "close")], + ), + b"GET /a HTTP/1.1\r\nHost: foo\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"), + b"HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[], reason=b"OK"), # type: ignore[arg-type] + b"HTTP/1.1 200 OK\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse( + status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade" + ), + b"HTTP/1.1 101 Upgrade\r\nUpgrade: websocket\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), # type: ignore[arg-type] + b"HTTP/1.1 101 Upgrade\r\n\r\n", + ), +] + + +def dowrite(writer: Callable[..., None], obj: Any) -> bytes: + got_list: List[bytes] = [] + writer(obj, got_list.append) + return b"".join(got_list) + + +def tw(writer: Any, obj: Any, expected: Any) -> None: + got = dowrite(writer, obj) + assert got == expected + + +def makebuf(data: bytes) -> ReceiveBuffer: + buf = ReceiveBuffer() + buf += data + return buf + + +def tr(reader: Any, data: bytes, expected: Any) -> None: + def check(got: Any) -> None: + assert got == expected + # Headers should always be returned as bytes, not e.g. 
bytearray + # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478 + for name, value in getattr(got, "headers", []): + assert type(name) is bytes + assert type(value) is bytes + + # Simple: consume whole thing + buf = makebuf(data) + check(reader(buf)) + assert not buf + + # Incrementally growing buffer + buf = ReceiveBuffer() + for i in range(len(data)): + assert reader(buf) is None + buf += data[i : i + 1] + check(reader(buf)) + + # Trailing data + buf = makebuf(data) + buf += b"trailing" + check(reader(buf)) + assert bytes(buf) == b"trailing" + + +def test_writers_simple() -> None: + for ((role, state), event, binary) in SIMPLE_CASES: + tw(WRITERS[role, state], event, binary) + + +def test_readers_simple() -> None: + for ((role, state), event, binary) in SIMPLE_CASES: + tr(READERS[role, state], binary, event) + + +def test_writers_unusual() -> None: + # Simple test of the write_headers utility routine + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("baz", "quux")]), + b"foo: bar\r\nbaz: quux\r\n\r\n", + ) + tw(write_headers, Headers([]), b"\r\n") + + # We understand HTTP/1.0, but we don't speak it + with pytest.raises(LocalProtocolError): + tw( + write_request, + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Connection", "close")], + http_version="1.0", + ), + None, + ) + with pytest.raises(LocalProtocolError): + tw( + write_any_response, + Response( + status_code=200, headers=[("Connection", "close")], http_version="1.0" + ), + None, + ) + + +def test_readers_unusual() -> None: + # Reading HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[("Some", "header")], + http_version="1.0", + ), + ) + + # check no-headers, since it's only legal with HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\n\r\n", + Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), # type: ignore[arg-type] + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n", + Response( + status_code=200, + headers=[("Some", "header")], + http_version="1.0", + reason=b"OK", + ), + ) + + # single-character header values (actually disallowed by the ABNF in RFC + # 7230 -- this is a bug in the standard that we originally copied...) 
+ tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n", + Response( + status_code=200, + headers=[("Foo", "a a a a a")], + http_version="1.0", + reason=b"OK", + ), + ) + + # Empty headers -- also legal + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + # Tolerate broken servers that leave off the response code + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b"" + ), + ) + + # Tolerate headers line endings (\r\n and \n) + # \n\r\b between headers and body + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\r\nSomeHeader: val\n\r\n", + Response( + status_code=200, + headers=[("SomeHeader", "val")], + http_version="1.1", + reason="OK", + ), + ) + + # delimited only with \n + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\nSomeHeader1: val1\nSomeHeader2: val2\n\n", + Response( + status_code=200, + headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], + http_version="1.1", + reason="OK", + ), + ) + + # mixed \r\n and \n + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\r\nSomeHeader1: val1\nSomeHeader2: val2\n\r\n", + Response( + status_code=200, + headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], + http_version="1.1", + reason="OK", + ), + ) + + # obsolete line folding + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Some: multi-line\r\n" + b" header\r\n" + b"\tnonsense\r\n" + b" \t \t\tI guess\r\n" + b"Connection: close\r\n" + b"More-nonsense: in the\r\n" + b" last header \r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[ + ("Host", "example.com"), + ("Some", "multi-line header nonsense I guess"), + ("Connection", "close"), + ("More-nonsense", "in the last header"), + ], + ), + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n", + None, + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None) + + +def test__obsolete_line_fold_bytes() -> None: + # _obsolete_line_fold has a defensive cast to bytearray, which is + # necessary to protect against O(n^2) behavior in case anyone ever passes + # in regular bytestrings... but right now we never pass in regular + # bytestrings. so this test just exists to get some coverage on that + # defensive cast. 
+ assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [ + b"aaa", + bytearray(b"bbb ccc"), + b"ddd", + ] + + +def _run_reader_iter( + reader: Any, buf: bytes, do_eof: bool +) -> Generator[Any, None, None]: + while True: + event = reader(buf) + if event is None: + break + yield event + # body readers have undefined behavior after returning EndOfMessage, + # because this changes the state so they don't get called again + if type(event) is EndOfMessage: + break + if do_eof: + assert not buf + yield reader.read_eof() + + +def _run_reader(*args: Any) -> List[Event]: + events = list(_run_reader_iter(*args)) + return normalize_data_events(events) + + +def t_body_reader(thunk: Any, data: bytes, expected: Any, do_eof: bool = False) -> None: + # Simple: consume whole thing + print("Test 1") + buf = makebuf(data) + assert _run_reader(thunk(), buf, do_eof) == expected + + # Incrementally growing buffer + print("Test 2") + reader = thunk() + buf = ReceiveBuffer() + events = [] + for i in range(len(data)): + events += _run_reader(reader, buf, False) + buf += data[i : i + 1] + events += _run_reader(reader, buf, do_eof) + assert normalize_data_events(events) == expected + + is_complete = any(type(event) is EndOfMessage for event in expected) + if is_complete and not do_eof: + buf = makebuf(data + b"trailing") + assert _run_reader(thunk(), buf, False) == expected + + +def test_ContentLengthReader() -> None: + t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()]) + + t_body_reader( + lambda: ContentLengthReader(10), + b"0123456789", + [Data(data=b"0123456789"), EndOfMessage()], + ) + + +def test_Http10Reader() -> None: + t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True) + t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False) + t_body_reader( + Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True + ) + + +def test_ChunkedReader() -> None: + t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()]) + + t_body_reader( + ChunkedReader, + b"0\r\nSome: header\r\n\r\n", + [EndOfMessage(headers=[("Some", "header")])], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + + b"10\r\n0123456789abcdef\r\n" + + b"0\r\n" + + b"Some: header\r\n\r\n", + [ + Data(data=b"012340123456789abcdef"), + EndOfMessage(headers=[("Some", "header")]), + ], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n", + [Data(data=b"012340123456789abcdef"), EndOfMessage()], + ) + + # handles upper and lowercase hex + t_body_reader( + ChunkedReader, + b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n", + [Data(data=b"x" * 0xAA), EndOfMessage()], + ) + + # refuses arbitrarily long chunk integers + with pytest.raises(LocalProtocolError): + # Technically this is legal HTTP/1.1, but we refuse to process chunk + # sizes that don't fit into 20 characters of hex + t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")]) + + # refuses garbage in the chunk count + with pytest.raises(LocalProtocolError): + t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None) + + # handles (and discards) "chunk extensions" omg wtf + t_body_reader( + ChunkedReader, + b"5; hello=there\r\n" + + b"xxxxx" + + b"\r\n" + + b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n', + [Data(data=b"xxxxx"), EndOfMessage()], + ) + + +def test_ContentLengthWriter() -> None: + w = ContentLengthWriter(5) + assert dowrite(w, Data(data=b"123")) == b"123" + assert dowrite(w, Data(data=b"45")) == b"45" + assert dowrite(w, 
EndOfMessage()) == b"" + + w = ContentLengthWriter(5) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"123456")) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"456")) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage()) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) == b"123" + dowrite(w, Data(data=b"45")) == b"45" + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) + + +def test_ChunkedWriter() -> None: + w = ChunkedWriter() + assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n" + assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n" + + assert dowrite(w, Data(data=b"")) == b"" + + assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n" + + assert ( + dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")])) + == b"0\r\nEtag: asdf\r\na: b\r\n\r\n" + ) + + +def test_Http10Writer() -> None: + w = Http10Writer() + assert dowrite(w, Data(data=b"1234")) == b"1234" + assert dowrite(w, EndOfMessage()) == b"" + + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) + + +def test_reject_garbage_after_request_line() -> None: + with pytest.raises(LocalProtocolError): + tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None) + + +def test_reject_garbage_after_response_line() -> None: + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n", + None, + ) + + +def test_reject_garbage_in_header_line() -> None: + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n", + None, + ) + + +def test_reject_non_vchar_in_path() -> None: + for bad_char in b"\x00\x20\x7f\xee": + message = bytearray(b"HEAD /") + message.append(bad_char) + message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n") + with pytest.raises(LocalProtocolError): + tr(READERS[CLIENT, IDLE], message, None) + + +# https://github.com/python-hyper/h11/issues/57 +def test_allow_some_garbage_in_cookies() -> None: + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" + b"Host: foo\r\n" + b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n" + b"\r\n", + Request( + method="HEAD", + target="/foo", + headers=[ + ("Host", "foo"), + ("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"), + ], + ), + ) + + +def test_host_comes_first() -> None: + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("Host", "example.com")]), + b"Host: example.com\r\nfoo: bar\r\n\r\n", + ) diff --git a/venv/lib/python3.10/site-packages/h11/tests/test_receivebuffer.py b/venv/lib/python3.10/site-packages/h11/tests/test_receivebuffer.py new file mode 100644 index 0000000..21a3870 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/tests/test_receivebuffer.py @@ -0,0 +1,135 @@ +import re +from typing import Tuple + +import pytest + +from .._receivebuffer import ReceiveBuffer + + +def test_receivebuffer() -> None: + b = ReceiveBuffer() + assert not b + assert len(b) == 0 + assert bytes(b) == b"" + + b += b"123" + assert b + assert len(b) == 3 + assert bytes(b) == b"123" + + assert bytes(b) == b"123" + + assert b.maybe_extract_at_most(2) == b"12" + assert b + assert len(b) == 1 + assert bytes(b) == b"3" + + assert bytes(b) == b"3" + + assert b.maybe_extract_at_most(10) == 
b"3" + assert bytes(b) == b"" + + assert b.maybe_extract_at_most(10) is None + assert not b + + ################################################################ + # maybe_extract_until_next + ################################################################ + + b += b"123\n456\r\n789\r\n" + + assert b.maybe_extract_next_line() == b"123\n456\r\n" + assert bytes(b) == b"789\r\n" + + assert b.maybe_extract_next_line() == b"789\r\n" + assert bytes(b) == b"" + + b += b"12\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r" + + b += b"345\n\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r345\n\r" + + # here we stopped at the middle of b"\r\n" delimiter + + b += b"\n6789aaa123\r\n" + assert b.maybe_extract_next_line() == b"12\r345\n\r\n" + assert b.maybe_extract_next_line() == b"6789aaa123\r\n" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"" + + ################################################################ + # maybe_extract_lines + ################################################################ + + b += b"123\r\na: b\r\nfoo:bar\r\n\r\ntrailing" + lines = b.maybe_extract_lines() + assert lines == [b"123", b"a: b", b"foo:bar"] + assert bytes(b) == b"trailing" + + assert b.maybe_extract_lines() is None + + b += b"\r\n\r" + assert b.maybe_extract_lines() is None + + assert b.maybe_extract_at_most(100) == b"trailing\r\n\r" + assert not b + + # Empty body case (as happens at the end of chunked encoding if there are + # no trailing headers, e.g.) + b += b"\r\ntrailing" + assert b.maybe_extract_lines() == [] + assert bytes(b) == b"trailing" + + +@pytest.mark.parametrize( + "data", + [ + pytest.param( + ( + b"HTTP/1.1 200 OK\r\n", + b"Content-type: text/plain\r\n", + b"Connection: close\r\n", + b"\r\n", + b"Some body", + ), + id="with_crlf_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_lf_only_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\r\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_mixed_crlf_and_lf", + ), + ], +) +def test_receivebuffer_for_invalid_delimiter(data: Tuple[bytes]) -> None: + b = ReceiveBuffer() + + for line in data: + b += line + + lines = b.maybe_extract_lines() + + assert lines == [ + b"HTTP/1.1 200 OK", + b"Content-type: text/plain", + b"Connection: close", + ] + assert bytes(b) == b"Some body" diff --git a/venv/lib/python3.10/site-packages/h11/tests/test_state.py b/venv/lib/python3.10/site-packages/h11/tests/test_state.py new file mode 100644 index 0000000..bc974e6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/tests/test_state.py @@ -0,0 +1,271 @@ +import pytest + +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + CLOSED, + ConnectionState, + DONE, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError + + +def test_ConnectionState() -> None: + cs = ConnectionState() + + # Basic event-triggered transitions + + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + cs.process_event(CLIENT, Request) + # The SERVER-Request special case: + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # Illegal transitions raise an error and nothing happens + with 
pytest.raises(LocalProtocolError): + cs.process_event(CLIENT, Request) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: DONE, SERVER: DONE} + + # State-triggered transition + + cs.process_event(SERVER, ConnectionClosed) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + +def test_ConnectionState_keep_alive() -> None: + # keep_alive = False + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_ConnectionState_keep_alive_in_DONE() -> None: + # Check that if keep_alive is disabled when the CLIENT is already in DONE, + # then this is sufficient to immediately trigger the DONE -> MUST_CLOSE + # transition + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states[CLIENT] is DONE + cs.process_keep_alive_disabled() + assert cs.states[CLIENT] is MUST_CLOSE + + +def test_ConnectionState_switch_denied() -> None: + for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE): + for deny_early in (True, False): + cs = ConnectionState() + cs.process_client_switch_proposal(switch_type) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + assert switch_type in cs.pending_switch_proposals + + if deny_early: + # before client reaches DONE + cs.process_event(SERVER, Response) + assert not cs.pending_switch_proposals + + cs.process_event(CLIENT, EndOfMessage) + + if deny_early: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + assert not cs.pending_switch_proposals + + +_response_type_for_switch = { + _SWITCH_UPGRADE: InformationalResponse, + _SWITCH_CONNECT: Response, + None: Response, +} + + +def test_ConnectionState_protocol_switch_accepted() -> None: + for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(switch_event) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, _response_type_for_switch[switch_event], switch_event) + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_double_protocol_switch() -> None: + # CONNECT + Upgrade is legal! Very silly, but legal. So we support + # it. 
Because sometimes doing the silly thing is easier than not. + for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_client_switch_proposal(_SWITCH_CONNECT) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + cs.process_event( + SERVER, _response_type_for_switch[server_switch], server_switch + ) + if server_switch is None: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_inconsistent_protocol_switch() -> None: + for client_switches, server_switch in [ + ([], _SWITCH_CONNECT), + ([], _SWITCH_UPGRADE), + ([_SWITCH_UPGRADE], _SWITCH_CONNECT), + ([_SWITCH_CONNECT], _SWITCH_UPGRADE), + ]: + cs = ConnectionState() + for client_switch in client_switches: # type: ignore[attr-defined] + cs.process_client_switch_proposal(client_switch) + cs.process_event(CLIENT, Request) + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Response, server_switch) + + +def test_ConnectionState_keepalive_protocol_switch_interaction() -> None: + # keep_alive=False + pending_switch_proposals + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # the protocol switch "wins" + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + # but when the server denies the request, keep_alive comes back into play + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY} + + +def test_ConnectionState_reuse() -> None: + cs = ConnectionState() + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + # No keepalive + + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # One side closed + + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(CLIENT, ConnectionClosed) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Successful protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Failed protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() +
assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + +def test_server_request_is_illegal() -> None: + # There used to be a bug in how we handled the Request special case that + # made this allowed... + cs = ConnectionState() + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Request) diff --git a/venv/lib/python3.10/site-packages/h11/tests/test_util.py b/venv/lib/python3.10/site-packages/h11/tests/test_util.py new file mode 100644 index 0000000..1637919 --- /dev/null +++ b/venv/lib/python3.10/site-packages/h11/tests/test_util.py @@ -0,0 +1,112 @@ +import re +import sys +import traceback +from typing import NoReturn + +import pytest + +from .._util import ( + bytesify, + LocalProtocolError, + ProtocolError, + RemoteProtocolError, + Sentinel, + validate, +) + + +def test_ProtocolError() -> None: + with pytest.raises(TypeError): + ProtocolError("abstract base class") + + +def test_LocalProtocolError() -> None: + try: + raise LocalProtocolError("foo") + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 400 + + try: + raise LocalProtocolError("foo", error_status_hint=418) + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 418 + + def thunk() -> NoReturn: + raise LocalProtocolError("a", error_status_hint=420) + + try: + try: + thunk() + except LocalProtocolError as exc1: + orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + exc1._reraise_as_remote_protocol_error() + except RemoteProtocolError as exc2: + assert type(exc2) is RemoteProtocolError + assert exc2.args == ("a",) + assert exc2.error_status_hint == 420 + new_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + assert new_traceback.endswith(orig_traceback) + + +def test_validate() -> None: + my_re = re.compile(br"(?P<group1>[0-9]+)\.(?P<group2>[0-9]+)") + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.") + + groups = validate(my_re, b"0.1") + assert groups == {"group1": b"0", "group2": b"1"} + + # successful partial matches are an error - must match whole string + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.1xx") + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.1\n") + + +def test_validate_formatting() -> None: + my_re = re.compile(br"foo") + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops") + assert "oops" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {}") + assert "oops {}" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {} xx", 10) + assert "oops 10 xx" in str(excinfo.value) + + +def test_make_sentinel() -> None: + class S(Sentinel, metaclass=Sentinel): + pass + + assert repr(S) == "S" + assert S == S + assert type(S).__name__ == "S" + assert S in {S} + assert type(S) is S + + class S2(Sentinel, metaclass=Sentinel): + pass + + assert repr(S2) == "S2" + assert S != S2 + assert S not in {S2} + assert type(S) is not type(S2) + + +def test_bytesify() -> None: + assert bytesify(b"123") == b"123" + assert bytesify(bytearray(b"123")) == b"123" + assert bytesify("123") == b"123" + + with pytest.raises(UnicodeEncodeError): + bytesify("\u1234") + + with pytest.raises(TypeError): + bytesify(10) diff --git a/venv/lib/python3.10/site-packages/idna-3.3.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++
b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/idna-3.3.dist-info/LICENSE.md b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/LICENSE.md new file mode 100644 index 0000000..b6f8732 --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/LICENSE.md @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2013-2021, Kim Davies +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/venv/lib/python3.10/site-packages/idna-3.3.dist-info/METADATA b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/METADATA new file mode 100644 index 0000000..6446805 --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/METADATA @@ -0,0 +1,236 @@ +Metadata-Version: 2.1 +Name: idna +Version: 3.3 +Summary: Internationalized Domain Names in Applications (IDNA) +Home-page: https://github.com/kjd/idna +Author: Kim Davies +Author-email: kim@cynosure.com.au +License: BSD-3-Clause +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities +Requires-Python: >=3.5 +License-File: LICENSE.md + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for the Internationalised Domain Names in Applications +(IDNA) protocol as specified in `RFC 5891 <https://tools.ietf.org/html/rfc5891>`_. +This is the latest version of the protocol and is sometimes referred to as +“IDNA 2008”. + +This library also provides support for Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing <https://unicode.org/reports/tr46/>`_. + +This acts as a suitable replacement for the “encodings.idna” module that +comes with the Python standard library, but which only supports the +older superseded IDNA specification (`RFC 3490 <https://tools.ietf.org/html/rfc3490>`_). + +Basic functions are simply executed: + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + +Installation +------------ + +To install this library, you can use pip: + +.. code-block:: bash + + $ pip install idna + +Alternatively, you can install the package using the bundled setup script: + +.. code-block:: bash + + $ python setup.py install + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a domain +name argument and perform a conversion to A-labels or U-labels respectively. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module: + +.. code-block:: pycon + + >>> import idna.codec + >>> print('домен.испытание'.encode('idna')) + b'xn--d1acufc.xn--80akhbyknj4f' + >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna')) + домен.испытание + +Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel`` +functions if necessary: + + ..
code-block:: pycon + + >>> idna.alabel('测试') + b'xn--0zwm56d' + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +As described in `RFC 5895 <https://tools.ietf.org/html/rfc5895>`_, the IDNA +specification does not normalize input from different potential ways a user +may input a domain name. This functionality, known as a “mapping”, is +considered by the specification to be a local user-interface issue distinct +from IDNA conversion functionality. + +This library provides one such mapping that was developed by the Unicode +Consortium. Known as `Unicode IDNA Compatibility Processing <https://unicode.org/reports/tr46/>`_, +it provides for both a regular mapping for typical applications, as well as +a transitional mapping to help migrate from older IDNA 2003 applications. + +For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL +LETTER K* is not allowed (nor are capital letters in general). UTS 46 will +convert this into lower case prior to applying the IDNA conversion. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed + >>> idna.encode('Königsgäßchen', uts46=True) + b'xn--knigsgchen-b4a3dun' + >>> print(idna.decode('xn--knigsgchen-b4a3dun')) + königsgäßchen + +Transitional processing provides conversions to help transition from the older +2003 standard to the current standard. For example, in the original IDNA +specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted into two +*LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this +conversion is not performed. + +.. code-block:: pycon + + >>> idna.encode('Königsgäßchen', uts46=True, transitional=True) + b'xn--knigsgsschen-lcb0w' + +Implementors should use transitional processing with caution, only in rare +cases where conversion from legacy labels to current labels must be performed +(i.e. IDNA implementations that pre-date 2008). For typical applications +that just need to convert labels, transitional processing is unlikely to be +beneficial and could produce unexpected incompatible results. + +``encodings.idna`` Compatibility +++++++++++++++++++++++++++++++++ + +Function calls from the Python built-in ``encodings.idna`` module are +mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. +Simply substitute the ``import`` clause in your code to refer to the +new module name. + +Exceptions +---------- + +All errors raised during the conversion following the specification should +raise an exception derived from the ``idna.IDNAError`` base class. + +More specific exceptions may be generated, such as ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and +right-to-left characters in a label; ``idna.InvalidCodepoint`` when +a specific codepoint is an illegal character in an IDN label (i.e. +INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is +illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ +but the contextual requirements are not satisfied). + +Building and Diagnostics +------------------------ + +The IDNA and UTS 46 functionality relies upon pre-calculated lookup +tables for performance. These tables are derived from computing against +eligibility criteria in the respective standards. These tables are +computed using the command-line script ``tools/idna-data``. + +This tool will fetch relevant codepoint data from the Unicode repository +and perform the required calculations to identify eligibility.
There are +three main modes: + +* ``idna-data make-libdata``. Generates ``idnadata.py`` and ``uts46data.py``, + the pre-calculated lookup tables using for IDNA and UTS 46 conversions. Implementors + who wish to track this library against a different Unicode version may use this tool + to manually generate a different version of the ``idnadata.py`` and ``uts46data.py`` + files. + +* ``idna-data make-table``. Generate a table of the IDNA disposition + (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix B.1 of RFC + 5892 and the pre-computed tables published by `IANA `_. + +* ``idna-data U+0061``. Prints debugging output on the various properties + associated with an individual Unicode codepoint (in this case, U+0061), that are + used to assess the IDNA and UTS 46 status of a codepoint. This is helpful in debugging + or analysis. + +The tool accepts a number of arguments, described using ``idna-data -h``. Most notably, +the ``--version`` argument allows the specification of the version of Unicode to use +in computing the table data. For example, ``idna-data --version 9.0.0 make-libdata`` +will generate library data against Unicode 9.0.0. + + +Additional Notes +---------------- + +* **Packages**. The latest tagged release version is published in the + `Python Package Index `_. + +* **Version support**. This library supports Python 3.5 and higher. As this library + serves as a low-level toolkit for a variety of applications, many of which strive + for broad compatibility with older Python versions, there is no rush to remove + older intepreter support. Removing support for older versions should be well + justified in that the maintenance burden has become too high. + +* **Python 2**. Python 2 is supported by version 2.x of this library. While active + development of the version 2.x series has ended, notable issues being corrected + may be backported to 2.x. Use "idna<3" in your requirements file if you need this + library for a Python 2 application. + +* **Testing**. The library has a test suite based on each rule of the IDNA specification, as + well as tests that are provided as part of the Unicode Technical Standard 46, + `Unicode IDNA Compatibility Processing `_. + +* **Emoji**. It is an occasional request to support emoji domains in this library. Encoding + of symbols like emoji is expressly prohibited by the technical standard IDNA 2008 and + emoji domains are broadly phased out across the domain industry due to associated security + risks. For now, applications that wish need to support these non-compliant labels may + wish to consider trying the encode/decode operation in this library first, and then falling + back to using `encodings.idna`. See `the Github project `_ + for more discussion. 
+ diff --git a/venv/lib/python3.10/site-packages/idna-3.3.dist-info/RECORD b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/RECORD new file mode 100644 index 0000000..6aadca1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/RECORD @@ -0,0 +1,23 @@ +idna-3.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna-3.3.dist-info/LICENSE.md,sha256=otbk2UC9JNvnuWRc3hmpeSzFHbeuDVrNMBrIYMqj6DY,1523 +idna-3.3.dist-info/METADATA,sha256=BdqiAf8ou4x1nzIHp2_sDfXWjl7BrSUGpOeVzbYHQuQ,9765 +idna-3.3.dist-info/RECORD,, +idna-3.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +idna-3.3.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 +idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 +idna/__pycache__/__init__.cpython-310.pyc,, +idna/__pycache__/codec.cpython-310.pyc,, +idna/__pycache__/compat.cpython-310.pyc,, +idna/__pycache__/core.cpython-310.pyc,, +idna/__pycache__/idnadata.cpython-310.pyc,, +idna/__pycache__/intranges.cpython-310.pyc,, +idna/__pycache__/package_data.cpython-310.pyc,, +idna/__pycache__/uts46data.cpython-310.pyc,, +idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374 +idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 +idna/core.py,sha256=RFIkY-HhFZaDoBEFjGwyGd_vWI04uOAQjnzueMWqwOU,12795 +idna/idnadata.py,sha256=fzMzkCea2xieVxcrjngJ-2pLsKQNejPCZFlBajIuQdw,44025 +idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 +idna/package_data.py,sha256=szxQhV0ZD0nKJ84Kuobw3l8q4_KeCyXjFRdpwIpKZmw,21 +idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna/uts46data.py,sha256=o-D7V-a0fOLZNd7tvxof6MYfUd0TBZzE2bLR5XO67xU,204400 diff --git a/venv/lib/python3.10/site-packages/idna-3.3.dist-info/WHEEL b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/idna-3.3.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/top_level.txt new file mode 100644 index 0000000..c40472e --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna-3.3.dist-info/top_level.txt @@ -0,0 +1 @@ +idna diff --git a/venv/lib/python3.10/site-packages/idna/__init__.py b/venv/lib/python3.10/site-packages/idna/__init__.py new file mode 100644 index 0000000..a40eeaf --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/__init__.py @@ -0,0 +1,44 @@ +from .package_data import __version__ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain + +__all__ = [ + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + "check_hyphen_ok", + "check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..2ca96b9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/codec.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/codec.cpython-310.pyc new file mode 100644 index 0000000..0e32fdf Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/codec.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/compat.cpython-310.pyc new file mode 100644 index 0000000..ebc7a18 Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/compat.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc new file mode 100644 index 0000000..fb0e194 Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc new file mode 100644 index 0000000..0b8192c Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/intranges.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/intranges.cpython-310.pyc new file mode 100644 index 0000000..266fb69 Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/intranges.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/package_data.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/package_data.cpython-310.pyc new file mode 100644 index 0000000..dfd962f Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/package_data.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc new file mode 100644 index 0000000..66a73cd Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/codec.py b/venv/lib/python3.10/site-packages/idna/codec.py new file mode 100644 index 0000000..1ca9ba6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/codec.py @@ -0,0 +1,112 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re +from typing import Tuple, Optional + +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return '', 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data: str, errors: str, final: bool) -> 
Tuple[str, int]: # type: ignore + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return "", 0 + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result_str = '.'.join(result) + trailing_dot # type: ignore + size += len(trailing_dot) + return result_str, size + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return ('', 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result_str = '.'.join(result) + trailing_dot + size += len(trailing_dot) + return (result_str, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +def getregentry() -> codecs.CodecInfo: + # Compatibility as a search_function for codecs.register() + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, # type: ignore + decode=Codec().decode, # type: ignore + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/venv/lib/python3.10/site-packages/idna/compat.py b/venv/lib/python3.10/site-packages/idna/compat.py new file mode 100644 index 0000000..786e6bd --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/compat.py @@ -0,0 +1,13 @@ +from .core import * +from .codec import * +from typing import Any, Union + +def ToASCII(label: str) -> bytes: + return encode(label) + +def ToUnicode(label: Union[bytes, bytearray]) -> str: + return decode(label) + +def nameprep(s: Any) -> None: + raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') + diff --git a/venv/lib/python3.10/site-packages/idna/core.py b/venv/lib/python3.10/site-packages/idna/core.py new file mode 100644 index 0000000..55ab967 --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/core.py @@ -0,0 +1,397 @@ +from . 
import idnadata +import bisect +import unicodedata +import re +from typing import Union, Optional +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError('Unknown character in unicodedata') + return v + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s: str) -> bytes: + return s.encode('punycode') + +def _unot(s: int) -> str: + return 'U+{:04X}'.format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = None # type: Optional[str] + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') 
+ return True + + +def check_hyphen_ok(label: str) -> bool: + if label[2:4] == '--': + raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('L'), ord('D')]: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('R'), ord('D')]: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == '\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + except ValueError: + raise IDNAError('Unknown codepoint adjacent to joiner {} at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = label.encode('ascii') + 
+
+
+def alabel(label: str) -> bytes:
+    try:
+        label_bytes = label.encode('ascii')
+        ulabel(label_bytes)
+        if not valid_label_length(label_bytes):
+            raise IDNAError('Label too long')
+        return label_bytes
+    except UnicodeEncodeError:
+        pass
+
+    if not label:
+        raise IDNAError('No Input')
+
+    label = str(label)
+    check_label(label)
+    label_bytes = _punycode(label)
+    label_bytes = _alabel_prefix + label_bytes
+
+    if not valid_label_length(label_bytes):
+        raise IDNAError('Label too long')
+
+    return label_bytes
+
+
+def ulabel(label: Union[str, bytes, bytearray]) -> str:
+    if not isinstance(label, (bytes, bytearray)):
+        try:
+            label_bytes = label.encode('ascii')
+        except UnicodeEncodeError:
+            check_label(label)
+            return label
+    else:
+        label_bytes = label
+
+    label_bytes = label_bytes.lower()
+    if label_bytes.startswith(_alabel_prefix):
+        label_bytes = label_bytes[len(_alabel_prefix):]
+        if not label_bytes:
+            raise IDNAError('Malformed A-label, no Punycode eligible content found')
+        if label_bytes.decode('ascii')[-1] == '-':
+            raise IDNAError('A-label must not end with a hyphen')
+    else:
+        check_label(label_bytes)
+        return label_bytes.decode('ascii')
+
+    try:
+        label = label_bytes.decode('punycode')
+    except UnicodeError:
+        raise IDNAError('Invalid A-label')
+    check_label(label)
+    return label
+
+
+def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str:
+    """Re-map the characters in the string according to UTS46 processing."""
+    from .uts46data import uts46data
+    output = ''
+
+    for pos, char in enumerate(domain):
+        code_point = ord(char)
+        try:
+            uts46row = uts46data[code_point if code_point < 256 else
+                bisect.bisect_left(uts46data, (code_point, 'Z')) - 1]
+            status = uts46row[1]
+            replacement = None  # type: Optional[str]
+            if len(uts46row) == 3:
+                replacement = uts46row[2]  # type: ignore
+            if (status == 'V' or
+                    (status == 'D' and not transitional) or
+                    (status == '3' and not std3_rules and replacement is None)):
+                output += char
+            elif replacement is not None and (status == 'M' or
+                    (status == '3' and not std3_rules) or
+                    (status == 'D' and transitional)):
+                output += replacement
+            elif status != 'I':
+                raise IndexError()
+        except IndexError:
+            raise InvalidCodepoint(
+                'Codepoint {} not allowed at position {} in {}'.format(
+                    _unot(code_point), pos + 1, repr(domain)))
+
+    return unicodedata.normalize('NFC', output)
+
+
+def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes:
+    if isinstance(s, (bytes, bytearray)):
+        s = s.decode('ascii')
+    if uts46:
+        s = uts46_remap(s, std3_rules, transitional)
+    trailing_dot = False
+    result = []
+    if strict:
+        labels = s.split('.')
+    else:
+        labels = _unicode_dots_re.split(s)
+    if not labels or labels == ['']:
+        raise IDNAError('Empty domain')
+    if labels[-1] == '':
+        del labels[-1]
+        trailing_dot = True
+    for label in labels:
+        s = alabel(label)
+        if s:
+            result.append(s)
+        else:
+            raise IDNAError('Empty label')
+    if trailing_dot:
+        result.append(b'')
+    s = b'.'.join(result)
+    if not valid_string_length(s, trailing_dot):
+        raise IDNAError('Domain too long')
+    return s
+
+
+def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str:
+    try:
+        if isinstance(s, (bytes, bytearray)):
+            s = s.decode('ascii')
+    except UnicodeDecodeError:
+        raise IDNAError('Invalid ASCII in A-label')
+    if uts46:
+        s = uts46_remap(s, std3_rules, False)
+    trailing_dot = False
+    result = []
+    if not strict:
+        labels = _unicode_dots_re.split(s)
+    else:
+        labels = s.split('.')
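# A usage note on the two public entry points (encode() above; decode()
# continues just below), written as Python comments so the surrounding
# function body stays intact. Assuming the vendored package imports as idna:
#
#     idna.encode('テスト')       ->  b'xn--zckzah'
#     idna.decode(b'xn--zckzah')  ->  'テスト'
#
# encode() splits on the four IDNA dot separators unless strict=True,
# A-labels each piece via alabel(), and re-joins with ASCII dots; decode()
# is the inverse via ulabel(). A label such as 'Königsgäßchen' raises
# InvalidCodepoint ('K' is not PVALID) unless uts46=True, in which case
# uts46_remap() first maps it to lowercase via the table's 'M' rows.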
if not labels or labels == ['']: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append('') + return '.'.join(result) diff --git a/venv/lib/python3.10/site-packages/idna/idnadata.py b/venv/lib/python3.10/site-packages/idna/idnadata.py new file mode 100644 index 0000000..1b5805d --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/idnadata.py @@ -0,0 +1,2137 @@ +# This file is automatically generated by tools/idna-data + +__version__ = '14.0.0' +scripts = { + 'Greek': ( + 0x37000000374, + 0x37500000378, + 0x37a0000037e, + 0x37f00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038b, + 0x38c0000038d, + 0x38e000003a2, + 0x3a3000003e2, + 0x3f000000400, + 0x1d2600001d2b, + 0x1d5d00001d62, + 0x1d6600001d6b, + 0x1dbf00001dc0, + 0x1f0000001f16, + 0x1f1800001f1e, + 0x1f2000001f46, + 0x1f4800001f4e, + 0x1f5000001f58, + 0x1f5900001f5a, + 0x1f5b00001f5c, + 0x1f5d00001f5e, + 0x1f5f00001f7e, + 0x1f8000001fb5, + 0x1fb600001fc5, + 0x1fc600001fd4, + 0x1fd600001fdc, + 0x1fdd00001ff0, + 0x1ff200001ff5, + 0x1ff600001fff, + 0x212600002127, + 0xab650000ab66, + 0x101400001018f, + 0x101a0000101a1, + 0x1d2000001d246, + ), + 'Han': ( + 0x2e8000002e9a, + 0x2e9b00002ef4, + 0x2f0000002fd6, + 0x300500003006, + 0x300700003008, + 0x30210000302a, + 0x30380000303c, + 0x340000004dc0, + 0x4e000000a000, + 0xf9000000fa6e, + 0xfa700000fada, + 0x16fe200016fe4, + 0x16ff000016ff2, + 0x200000002a6e0, + 0x2a7000002b739, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x2f8000002fa1e, + 0x300000003134b, + ), + 'Hebrew': ( + 0x591000005c8, + 0x5d0000005eb, + 0x5ef000005f5, + 0xfb1d0000fb37, + 0xfb380000fb3d, + 0xfb3e0000fb3f, + 0xfb400000fb42, + 0xfb430000fb45, + 0xfb460000fb50, + ), + 'Hiragana': ( + 0x304100003097, + 0x309d000030a0, + 0x1b0010001b120, + 0x1b1500001b153, + 0x1f2000001f201, + ), + 'Katakana': ( + 0x30a1000030fb, + 0x30fd00003100, + 0x31f000003200, + 0x32d0000032ff, + 0x330000003358, + 0xff660000ff70, + 0xff710000ff9e, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b001, + 0x1b1200001b123, + 0x1b1640001b168, + ), +} +joining_types = { + 0x600: 85, + 0x601: 85, + 0x602: 85, + 0x603: 85, + 0x604: 85, + 0x605: 85, + 0x608: 85, + 0x60b: 85, + 0x620: 68, + 0x621: 85, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62a: 68, + 0x62b: 68, + 0x62c: 68, + 0x62d: 68, + 0x62e: 68, + 0x62f: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63a: 68, + 0x63b: 68, + 0x63c: 68, + 0x63d: 68, + 0x63e: 68, + 0x63f: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64a: 68, + 0x66e: 68, + 0x66f: 68, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x674: 85, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67a: 68, + 0x67b: 68, + 0x67c: 68, + 0x67d: 68, + 0x67e: 68, + 0x67f: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68a: 82, + 0x68b: 82, + 0x68c: 82, + 0x68d: 82, + 0x68e: 82, + 0x68f: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69a: 68, + 0x69b: 68, + 0x69c: 68, + 0x69d: 68, + 0x69e: 
68, + 0x69f: 68, + 0x6a0: 68, + 0x6a1: 68, + 0x6a2: 68, + 0x6a3: 68, + 0x6a4: 68, + 0x6a5: 68, + 0x6a6: 68, + 0x6a7: 68, + 0x6a8: 68, + 0x6a9: 68, + 0x6aa: 68, + 0x6ab: 68, + 0x6ac: 68, + 0x6ad: 68, + 0x6ae: 68, + 0x6af: 68, + 0x6b0: 68, + 0x6b1: 68, + 0x6b2: 68, + 0x6b3: 68, + 0x6b4: 68, + 0x6b5: 68, + 0x6b6: 68, + 0x6b7: 68, + 0x6b8: 68, + 0x6b9: 68, + 0x6ba: 68, + 0x6bb: 68, + 0x6bc: 68, + 0x6bd: 68, + 0x6be: 68, + 0x6bf: 68, + 0x6c0: 82, + 0x6c1: 68, + 0x6c2: 68, + 0x6c3: 82, + 0x6c4: 82, + 0x6c5: 82, + 0x6c6: 82, + 0x6c7: 82, + 0x6c8: 82, + 0x6c9: 82, + 0x6ca: 82, + 0x6cb: 82, + 0x6cc: 68, + 0x6cd: 82, + 0x6ce: 68, + 0x6cf: 82, + 0x6d0: 68, + 0x6d1: 68, + 0x6d2: 82, + 0x6d3: 82, + 0x6d5: 82, + 0x6dd: 85, + 0x6ee: 82, + 0x6ef: 82, + 0x6fa: 68, + 0x6fb: 68, + 0x6fc: 68, + 0x6ff: 68, + 0x70f: 84, + 0x710: 82, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71a: 68, + 0x71b: 68, + 0x71c: 68, + 0x71d: 68, + 0x71e: 82, + 0x71f: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72a: 82, + 0x72b: 68, + 0x72c: 82, + 0x72d: 68, + 0x72e: 68, + 0x72f: 82, + 0x74d: 82, + 0x74e: 68, + 0x74f: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75a: 82, + 0x75b: 82, + 0x75c: 68, + 0x75d: 68, + 0x75e: 68, + 0x75f: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76a: 68, + 0x76b: 82, + 0x76c: 82, + 0x76d: 68, + 0x76e: 68, + 0x76f: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77a: 68, + 0x77b: 68, + 0x77c: 68, + 0x77d: 68, + 0x77e: 68, + 0x77f: 68, + 0x7ca: 68, + 0x7cb: 68, + 0x7cc: 68, + 0x7cd: 68, + 0x7ce: 68, + 0x7cf: 68, + 0x7d0: 68, + 0x7d1: 68, + 0x7d2: 68, + 0x7d3: 68, + 0x7d4: 68, + 0x7d5: 68, + 0x7d6: 68, + 0x7d7: 68, + 0x7d8: 68, + 0x7d9: 68, + 0x7da: 68, + 0x7db: 68, + 0x7dc: 68, + 0x7dd: 68, + 0x7de: 68, + 0x7df: 68, + 0x7e0: 68, + 0x7e1: 68, + 0x7e2: 68, + 0x7e3: 68, + 0x7e4: 68, + 0x7e5: 68, + 0x7e6: 68, + 0x7e7: 68, + 0x7e8: 68, + 0x7e9: 68, + 0x7ea: 68, + 0x7fa: 67, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84a: 68, + 0x84b: 68, + 0x84c: 68, + 0x84d: 68, + 0x84e: 68, + 0x84f: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x860: 68, + 0x861: 85, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x866: 85, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86a: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87a: 82, + 0x87b: 82, + 0x87c: 82, + 0x87d: 82, + 0x87e: 82, + 0x87f: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x887: 85, + 0x888: 85, + 0x889: 68, + 0x88a: 68, + 0x88b: 68, + 0x88c: 68, + 0x88d: 68, + 0x88e: 82, + 0x890: 85, + 0x891: 85, + 0x8a0: 68, + 0x8a1: 68, + 0x8a2: 68, + 0x8a3: 68, + 0x8a4: 68, + 0x8a5: 68, + 0x8a6: 68, + 0x8a7: 68, + 0x8a8: 68, + 0x8a9: 68, + 0x8aa: 82, + 0x8ab: 82, + 0x8ac: 82, + 0x8ad: 85, + 0x8ae: 82, + 0x8af: 68, + 0x8b0: 68, + 0x8b1: 82, + 0x8b2: 82, + 0x8b3: 68, + 0x8b4: 68, + 0x8b5: 68, + 0x8b6: 68, + 0x8b7: 68, + 0x8b8: 68, + 0x8b9: 82, + 
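# Annotation (not part of the generated file): the integer values in
# joining_types are ord() codes of the Unicode Joining_Type letters:
# 68 = ord('D') dual-joining, 82 = ord('R') right-joining, 76 = ord('L')
# left-joining, 84 = ord('T') transparent, 67 = ord('C') join-causing,
# 85 = ord('U') non-joining. This is why valid_contextj() in core.py
# compares lookups against ord('T'), ord('L'), ord('D') and ord('R').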
0x8ba: 68, + 0x8bb: 68, + 0x8bc: 68, + 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, + 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, + 0x8c8: 68, + 0x8e2: 85, + 0x1806: 85, + 0x1807: 68, + 0x180a: 67, + 0x180e: 85, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182a: 68, + 0x182b: 68, + 0x182c: 68, + 0x182d: 68, + 0x182e: 68, + 0x182f: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183a: 68, + 0x183b: 68, + 0x183c: 68, + 0x183d: 68, + 0x183e: 68, + 0x183f: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184a: 68, + 0x184b: 68, + 0x184c: 68, + 0x184d: 68, + 0x184e: 68, + 0x184f: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185a: 68, + 0x185b: 68, + 0x185c: 68, + 0x185d: 68, + 0x185e: 68, + 0x185f: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186a: 68, + 0x186b: 68, + 0x186c: 68, + 0x186d: 68, + 0x186e: 68, + 0x186f: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1880: 85, + 0x1881: 85, + 0x1882: 85, + 0x1883: 85, + 0x1884: 85, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188a: 68, + 0x188b: 68, + 0x188c: 68, + 0x188d: 68, + 0x188e: 68, + 0x188f: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189a: 68, + 0x189b: 68, + 0x189c: 68, + 0x189d: 68, + 0x189e: 68, + 0x189f: 68, + 0x18a0: 68, + 0x18a1: 68, + 0x18a2: 68, + 0x18a3: 68, + 0x18a4: 68, + 0x18a5: 68, + 0x18a6: 68, + 0x18a7: 68, + 0x18a8: 68, + 0x18aa: 68, + 0x200c: 85, + 0x200d: 67, + 0x202f: 85, + 0x2066: 85, + 0x2067: 85, + 0x2068: 85, + 0x2069: 85, + 0xa840: 68, + 0xa841: 68, + 0xa842: 68, + 0xa843: 68, + 0xa844: 68, + 0xa845: 68, + 0xa846: 68, + 0xa847: 68, + 0xa848: 68, + 0xa849: 68, + 0xa84a: 68, + 0xa84b: 68, + 0xa84c: 68, + 0xa84d: 68, + 0xa84e: 68, + 0xa84f: 68, + 0xa850: 68, + 0xa851: 68, + 0xa852: 68, + 0xa853: 68, + 0xa854: 68, + 0xa855: 68, + 0xa856: 68, + 0xa857: 68, + 0xa858: 68, + 0xa859: 68, + 0xa85a: 68, + 0xa85b: 68, + 0xa85c: 68, + 0xa85d: 68, + 0xa85e: 68, + 0xa85f: 68, + 0xa860: 68, + 0xa861: 68, + 0xa862: 68, + 0xa863: 68, + 0xa864: 68, + 0xa865: 68, + 0xa866: 68, + 0xa867: 68, + 0xa868: 68, + 0xa869: 68, + 0xa86a: 68, + 0xa86b: 68, + 0xa86c: 68, + 0xa86d: 68, + 0xa86e: 68, + 0xa86f: 68, + 0xa870: 68, + 0xa871: 68, + 0xa872: 76, + 0xa873: 85, + 0x10ac0: 68, + 0x10ac1: 68, + 0x10ac2: 68, + 0x10ac3: 68, + 0x10ac4: 68, + 0x10ac5: 82, + 0x10ac6: 85, + 0x10ac7: 82, + 0x10ac8: 85, + 0x10ac9: 82, + 0x10aca: 82, + 0x10acb: 85, + 0x10acc: 85, + 0x10acd: 76, + 0x10ace: 82, + 0x10acf: 82, + 0x10ad0: 82, + 0x10ad1: 82, + 0x10ad2: 82, + 0x10ad3: 68, + 0x10ad4: 68, + 0x10ad5: 68, + 0x10ad6: 68, + 0x10ad7: 76, + 0x10ad8: 68, + 0x10ad9: 68, + 0x10ada: 68, + 0x10adb: 68, + 0x10adc: 68, + 0x10add: 82, + 0x10ade: 68, + 0x10adf: 68, + 0x10ae0: 68, + 0x10ae1: 82, + 0x10ae2: 85, + 0x10ae3: 85, + 0x10ae4: 82, + 0x10aeb: 68, + 0x10aec: 68, + 0x10aed: 68, + 0x10aee: 68, + 0x10aef: 82, + 
0x10b80: 68, + 0x10b81: 82, + 0x10b82: 68, + 0x10b83: 82, + 0x10b84: 82, + 0x10b85: 82, + 0x10b86: 68, + 0x10b87: 68, + 0x10b88: 68, + 0x10b89: 82, + 0x10b8a: 68, + 0x10b8b: 68, + 0x10b8c: 82, + 0x10b8d: 68, + 0x10b8e: 82, + 0x10b8f: 82, + 0x10b90: 68, + 0x10b91: 82, + 0x10ba9: 82, + 0x10baa: 82, + 0x10bab: 82, + 0x10bac: 82, + 0x10bad: 68, + 0x10bae: 68, + 0x10baf: 85, + 0x10d00: 76, + 0x10d01: 68, + 0x10d02: 68, + 0x10d03: 68, + 0x10d04: 68, + 0x10d05: 68, + 0x10d06: 68, + 0x10d07: 68, + 0x10d08: 68, + 0x10d09: 68, + 0x10d0a: 68, + 0x10d0b: 68, + 0x10d0c: 68, + 0x10d0d: 68, + 0x10d0e: 68, + 0x10d0f: 68, + 0x10d10: 68, + 0x10d11: 68, + 0x10d12: 68, + 0x10d13: 68, + 0x10d14: 68, + 0x10d15: 68, + 0x10d16: 68, + 0x10d17: 68, + 0x10d18: 68, + 0x10d19: 68, + 0x10d1a: 68, + 0x10d1b: 68, + 0x10d1c: 68, + 0x10d1d: 68, + 0x10d1e: 68, + 0x10d1f: 68, + 0x10d20: 68, + 0x10d21: 68, + 0x10d22: 82, + 0x10d23: 68, + 0x10f30: 68, + 0x10f31: 68, + 0x10f32: 68, + 0x10f33: 82, + 0x10f34: 68, + 0x10f35: 68, + 0x10f36: 68, + 0x10f37: 68, + 0x10f38: 68, + 0x10f39: 68, + 0x10f3a: 68, + 0x10f3b: 68, + 0x10f3c: 68, + 0x10f3d: 68, + 0x10f3e: 68, + 0x10f3f: 68, + 0x10f40: 68, + 0x10f41: 68, + 0x10f42: 68, + 0x10f43: 68, + 0x10f44: 68, + 0x10f45: 85, + 0x10f51: 68, + 0x10f52: 68, + 0x10f53: 68, + 0x10f54: 82, + 0x10f70: 68, + 0x10f71: 68, + 0x10f72: 68, + 0x10f73: 68, + 0x10f74: 82, + 0x10f75: 82, + 0x10f76: 68, + 0x10f77: 68, + 0x10f78: 68, + 0x10f79: 68, + 0x10f7a: 68, + 0x10f7b: 68, + 0x10f7c: 68, + 0x10f7d: 68, + 0x10f7e: 68, + 0x10f7f: 68, + 0x10f80: 68, + 0x10f81: 68, + 0x10fb0: 68, + 0x10fb1: 85, + 0x10fb2: 68, + 0x10fb3: 68, + 0x10fb4: 82, + 0x10fb5: 82, + 0x10fb6: 82, + 0x10fb7: 85, + 0x10fb8: 68, + 0x10fb9: 82, + 0x10fba: 82, + 0x10fbb: 68, + 0x10fbc: 68, + 0x10fbd: 82, + 0x10fbe: 68, + 0x10fbf: 68, + 0x10fc0: 85, + 0x10fc1: 68, + 0x10fc2: 82, + 0x10fc3: 82, + 0x10fc4: 68, + 0x10fc5: 85, + 0x10fc6: 85, + 0x10fc7: 85, + 0x10fc8: 85, + 0x10fc9: 82, + 0x10fca: 68, + 0x10fcb: 76, + 0x110bd: 85, + 0x110cd: 85, + 0x1e900: 68, + 0x1e901: 68, + 0x1e902: 68, + 0x1e903: 68, + 0x1e904: 68, + 0x1e905: 68, + 0x1e906: 68, + 0x1e907: 68, + 0x1e908: 68, + 0x1e909: 68, + 0x1e90a: 68, + 0x1e90b: 68, + 0x1e90c: 68, + 0x1e90d: 68, + 0x1e90e: 68, + 0x1e90f: 68, + 0x1e910: 68, + 0x1e911: 68, + 0x1e912: 68, + 0x1e913: 68, + 0x1e914: 68, + 0x1e915: 68, + 0x1e916: 68, + 0x1e917: 68, + 0x1e918: 68, + 0x1e919: 68, + 0x1e91a: 68, + 0x1e91b: 68, + 0x1e91c: 68, + 0x1e91d: 68, + 0x1e91e: 68, + 0x1e91f: 68, + 0x1e920: 68, + 0x1e921: 68, + 0x1e922: 68, + 0x1e923: 68, + 0x1e924: 68, + 0x1e925: 68, + 0x1e926: 68, + 0x1e927: 68, + 0x1e928: 68, + 0x1e929: 68, + 0x1e92a: 68, + 0x1e92b: 68, + 0x1e92c: 68, + 0x1e92d: 68, + 0x1e92e: 68, + 0x1e92f: 68, + 0x1e930: 68, + 0x1e931: 68, + 0x1e932: 68, + 0x1e933: 68, + 0x1e934: 68, + 0x1e935: 68, + 0x1e936: 68, + 0x1e937: 68, + 0x1e938: 68, + 0x1e939: 68, + 0x1e93a: 68, + 0x1e93b: 68, + 0x1e93c: 68, + 0x1e93d: 68, + 0x1e93e: 68, + 0x1e93f: 68, + 0x1e940: 68, + 0x1e941: 68, + 0x1e942: 68, + 0x1e943: 68, + 0x1e94b: 84, +} +codepoint_classes = { + 'PVALID': ( + 0x2d0000002e, + 0x300000003a, + 0x610000007b, + 0xdf000000f7, + 0xf800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010a, + 0x10b0000010c, + 0x10d0000010e, + 0x10f00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011a, + 0x11b0000011c, + 0x11d0000011e, + 0x11f00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012a, + 0x12b0000012c, + 
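# Annotation (not part of the generated file): each entry in these tuples
# packs one half-open codepoint range [start, end) into a single integer as
# (start << 32) | end. For example, 0x610000007b near the top of 'PVALID' is
# (0x61 << 32) | 0x7b, i.e. the lowercase ASCII letters 'a'..'z';
# intranges_contain() in intranges.py bisects these packed values.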
0x12d0000012e, + 0x12f00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13a0000013b, + 0x13c0000013d, + 0x13e0000013f, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14b0000014c, + 0x14d0000014e, + 0x14f00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015a, + 0x15b0000015c, + 0x15d0000015e, + 0x15f00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016a, + 0x16b0000016c, + 0x16d0000016e, + 0x16f00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17a0000017b, + 0x17c0000017d, + 0x17e0000017f, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18c0000018e, + 0x19200000193, + 0x19500000196, + 0x1990000019c, + 0x19e0000019f, + 0x1a1000001a2, + 0x1a3000001a4, + 0x1a5000001a6, + 0x1a8000001a9, + 0x1aa000001ac, + 0x1ad000001ae, + 0x1b0000001b1, + 0x1b4000001b5, + 0x1b6000001b7, + 0x1b9000001bc, + 0x1bd000001c4, + 0x1ce000001cf, + 0x1d0000001d1, + 0x1d2000001d3, + 0x1d4000001d5, + 0x1d6000001d7, + 0x1d8000001d9, + 0x1da000001db, + 0x1dc000001de, + 0x1df000001e0, + 0x1e1000001e2, + 0x1e3000001e4, + 0x1e5000001e6, + 0x1e7000001e8, + 0x1e9000001ea, + 0x1eb000001ec, + 0x1ed000001ee, + 0x1ef000001f1, + 0x1f5000001f6, + 0x1f9000001fa, + 0x1fb000001fc, + 0x1fd000001fe, + 0x1ff00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020a, + 0x20b0000020c, + 0x20d0000020e, + 0x20f00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021a, + 0x21b0000021c, + 0x21d0000021e, + 0x21f00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022a, + 0x22b0000022c, + 0x22d0000022e, + 0x22f00000230, + 0x23100000232, + 0x2330000023a, + 0x23c0000023d, + 0x23f00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024a, + 0x24b0000024c, + 0x24d0000024e, + 0x24f000002b0, + 0x2b9000002c2, + 0x2c6000002d2, + 0x2ec000002ed, + 0x2ee000002ef, + 0x30000000340, + 0x34200000343, + 0x3460000034f, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37b0000037e, + 0x39000000391, + 0x3ac000003cf, + 0x3d7000003d8, + 0x3d9000003da, + 0x3db000003dc, + 0x3dd000003de, + 0x3df000003e0, + 0x3e1000003e2, + 0x3e3000003e4, + 0x3e5000003e6, + 0x3e7000003e8, + 0x3e9000003ea, + 0x3eb000003ec, + 0x3ed000003ee, + 0x3ef000003f0, + 0x3f3000003f4, + 0x3f8000003f9, + 0x3fb000003fd, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046a, + 0x46b0000046c, + 0x46d0000046e, + 0x46f00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047a, + 0x47b0000047c, + 0x47d0000047e, + 0x47f00000480, + 0x48100000482, + 0x48300000488, + 0x48b0000048c, + 0x48d0000048e, + 0x48f00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049a, + 0x49b0000049c, + 0x49d0000049e, + 0x49f000004a0, + 0x4a1000004a2, + 0x4a3000004a4, + 0x4a5000004a6, + 0x4a7000004a8, + 0x4a9000004aa, + 0x4ab000004ac, + 0x4ad000004ae, + 0x4af000004b0, + 0x4b1000004b2, + 0x4b3000004b4, + 0x4b5000004b6, + 0x4b7000004b8, + 0x4b9000004ba, + 0x4bb000004bc, + 0x4bd000004be, + 0x4bf000004c0, + 0x4c2000004c3, + 0x4c4000004c5, + 0x4c6000004c7, + 0x4c8000004c9, + 0x4ca000004cb, + 0x4cc000004cd, + 0x4ce000004d0, + 0x4d1000004d2, + 0x4d3000004d4, + 0x4d5000004d6, + 0x4d7000004d8, + 0x4d9000004da, + 0x4db000004dc, + 0x4dd000004de, + 0x4df000004e0, + 0x4e1000004e2, + 0x4e3000004e4, + 0x4e5000004e6, + 0x4e7000004e8, + 
0x4e9000004ea, + 0x4eb000004ec, + 0x4ed000004ee, + 0x4ef000004f0, + 0x4f1000004f2, + 0x4f3000004f4, + 0x4f5000004f6, + 0x4f7000004f8, + 0x4f9000004fa, + 0x4fb000004fc, + 0x4fd000004fe, + 0x4ff00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050a, + 0x50b0000050c, + 0x50d0000050e, + 0x50f00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051a, + 0x51b0000051c, + 0x51d0000051e, + 0x51f00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052a, + 0x52b0000052c, + 0x52d0000052e, + 0x52f00000530, + 0x5590000055a, + 0x56000000587, + 0x58800000589, + 0x591000005be, + 0x5bf000005c0, + 0x5c1000005c3, + 0x5c4000005c6, + 0x5c7000005c8, + 0x5d0000005eb, + 0x5ef000005f3, + 0x6100000061b, + 0x62000000640, + 0x64100000660, + 0x66e00000675, + 0x679000006d4, + 0x6d5000006dd, + 0x6df000006e9, + 0x6ea000006f0, + 0x6fa00000700, + 0x7100000074b, + 0x74d000007b2, + 0x7c0000007f6, + 0x7fd000007fe, + 0x8000000082e, + 0x8400000085c, + 0x8600000086b, + 0x87000000888, + 0x8890000088f, + 0x898000008e2, + 0x8e300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098d, + 0x98f00000991, + 0x993000009a9, + 0x9aa000009b1, + 0x9b2000009b3, + 0x9b6000009ba, + 0x9bc000009c5, + 0x9c7000009c9, + 0x9cb000009cf, + 0x9d7000009d8, + 0x9e0000009e4, + 0x9e6000009f2, + 0x9fc000009fd, + 0x9fe000009ff, + 0xa0100000a04, + 0xa0500000a0b, + 0xa0f00000a11, + 0xa1300000a29, + 0xa2a00000a31, + 0xa3200000a33, + 0xa3500000a36, + 0xa3800000a3a, + 0xa3c00000a3d, + 0xa3e00000a43, + 0xa4700000a49, + 0xa4b00000a4e, + 0xa5100000a52, + 0xa5c00000a5d, + 0xa6600000a76, + 0xa8100000a84, + 0xa8500000a8e, + 0xa8f00000a92, + 0xa9300000aa9, + 0xaaa00000ab1, + 0xab200000ab4, + 0xab500000aba, + 0xabc00000ac6, + 0xac700000aca, + 0xacb00000ace, + 0xad000000ad1, + 0xae000000ae4, + 0xae600000af0, + 0xaf900000b00, + 0xb0100000b04, + 0xb0500000b0d, + 0xb0f00000b11, + 0xb1300000b29, + 0xb2a00000b31, + 0xb3200000b34, + 0xb3500000b3a, + 0xb3c00000b45, + 0xb4700000b49, + 0xb4b00000b4e, + 0xb5500000b58, + 0xb5f00000b64, + 0xb6600000b70, + 0xb7100000b72, + 0xb8200000b84, + 0xb8500000b8b, + 0xb8e00000b91, + 0xb9200000b96, + 0xb9900000b9b, + 0xb9c00000b9d, + 0xb9e00000ba0, + 0xba300000ba5, + 0xba800000bab, + 0xbae00000bba, + 0xbbe00000bc3, + 0xbc600000bc9, + 0xbca00000bce, + 0xbd000000bd1, + 0xbd700000bd8, + 0xbe600000bf0, + 0xc0000000c0d, + 0xc0e00000c11, + 0xc1200000c29, + 0xc2a00000c3a, + 0xc3c00000c45, + 0xc4600000c49, + 0xc4a00000c4e, + 0xc5500000c57, + 0xc5800000c5b, + 0xc5d00000c5e, + 0xc6000000c64, + 0xc6600000c70, + 0xc8000000c84, + 0xc8500000c8d, + 0xc8e00000c91, + 0xc9200000ca9, + 0xcaa00000cb4, + 0xcb500000cba, + 0xcbc00000cc5, + 0xcc600000cc9, + 0xcca00000cce, + 0xcd500000cd7, + 0xcdd00000cdf, + 0xce000000ce4, + 0xce600000cf0, + 0xcf100000cf3, + 0xd0000000d0d, + 0xd0e00000d11, + 0xd1200000d45, + 0xd4600000d49, + 0xd4a00000d4f, + 0xd5400000d58, + 0xd5f00000d64, + 0xd6600000d70, + 0xd7a00000d80, + 0xd8100000d84, + 0xd8500000d97, + 0xd9a00000db2, + 0xdb300000dbc, + 0xdbd00000dbe, + 0xdc000000dc7, + 0xdca00000dcb, + 0xdcf00000dd5, + 0xdd600000dd7, + 0xdd800000de0, + 0xde600000df0, + 0xdf200000df4, + 0xe0100000e33, + 0xe3400000e3b, + 0xe4000000e4f, + 0xe5000000e5a, + 0xe8100000e83, + 0xe8400000e85, + 0xe8600000e8b, + 0xe8c00000ea4, + 0xea500000ea6, + 0xea700000eb3, + 0xeb400000ebe, + 0xec000000ec5, + 0xec600000ec7, + 0xec800000ece, + 0xed000000eda, + 0xede00000ee0, + 0xf0000000f01, + 0xf0b00000f0c, + 0xf1800000f1a, + 0xf2000000f2a, + 
0xf3500000f36, + 0xf3700000f38, + 0xf3900000f3a, + 0xf3e00000f43, + 0xf4400000f48, + 0xf4900000f4d, + 0xf4e00000f52, + 0xf5300000f57, + 0xf5800000f5c, + 0xf5d00000f69, + 0xf6a00000f6d, + 0xf7100000f73, + 0xf7400000f75, + 0xf7a00000f81, + 0xf8200000f85, + 0xf8600000f93, + 0xf9400000f98, + 0xf9900000f9d, + 0xf9e00000fa2, + 0xfa300000fa7, + 0xfa800000fac, + 0xfad00000fb9, + 0xfba00000fbd, + 0xfc600000fc7, + 0x10000000104a, + 0x10500000109e, + 0x10d0000010fb, + 0x10fd00001100, + 0x120000001249, + 0x124a0000124e, + 0x125000001257, + 0x125800001259, + 0x125a0000125e, + 0x126000001289, + 0x128a0000128e, + 0x1290000012b1, + 0x12b2000012b6, + 0x12b8000012bf, + 0x12c0000012c1, + 0x12c2000012c6, + 0x12c8000012d7, + 0x12d800001311, + 0x131200001316, + 0x13180000135b, + 0x135d00001360, + 0x138000001390, + 0x13a0000013f6, + 0x14010000166d, + 0x166f00001680, + 0x16810000169b, + 0x16a0000016eb, + 0x16f1000016f9, + 0x170000001716, + 0x171f00001735, + 0x174000001754, + 0x17600000176d, + 0x176e00001771, + 0x177200001774, + 0x1780000017b4, + 0x17b6000017d4, + 0x17d7000017d8, + 0x17dc000017de, + 0x17e0000017ea, + 0x18100000181a, + 0x182000001879, + 0x1880000018ab, + 0x18b0000018f6, + 0x19000000191f, + 0x19200000192c, + 0x19300000193c, + 0x19460000196e, + 0x197000001975, + 0x1980000019ac, + 0x19b0000019ca, + 0x19d0000019da, + 0x1a0000001a1c, + 0x1a2000001a5f, + 0x1a6000001a7d, + 0x1a7f00001a8a, + 0x1a9000001a9a, + 0x1aa700001aa8, + 0x1ab000001abe, + 0x1abf00001acf, + 0x1b0000001b4d, + 0x1b5000001b5a, + 0x1b6b00001b74, + 0x1b8000001bf4, + 0x1c0000001c38, + 0x1c4000001c4a, + 0x1c4d00001c7e, + 0x1cd000001cd3, + 0x1cd400001cfb, + 0x1d0000001d2c, + 0x1d2f00001d30, + 0x1d3b00001d3c, + 0x1d4e00001d4f, + 0x1d6b00001d78, + 0x1d7900001d9b, + 0x1dc000001e00, + 0x1e0100001e02, + 0x1e0300001e04, + 0x1e0500001e06, + 0x1e0700001e08, + 0x1e0900001e0a, + 0x1e0b00001e0c, + 0x1e0d00001e0e, + 0x1e0f00001e10, + 0x1e1100001e12, + 0x1e1300001e14, + 0x1e1500001e16, + 0x1e1700001e18, + 0x1e1900001e1a, + 0x1e1b00001e1c, + 0x1e1d00001e1e, + 0x1e1f00001e20, + 0x1e2100001e22, + 0x1e2300001e24, + 0x1e2500001e26, + 0x1e2700001e28, + 0x1e2900001e2a, + 0x1e2b00001e2c, + 0x1e2d00001e2e, + 0x1e2f00001e30, + 0x1e3100001e32, + 0x1e3300001e34, + 0x1e3500001e36, + 0x1e3700001e38, + 0x1e3900001e3a, + 0x1e3b00001e3c, + 0x1e3d00001e3e, + 0x1e3f00001e40, + 0x1e4100001e42, + 0x1e4300001e44, + 0x1e4500001e46, + 0x1e4700001e48, + 0x1e4900001e4a, + 0x1e4b00001e4c, + 0x1e4d00001e4e, + 0x1e4f00001e50, + 0x1e5100001e52, + 0x1e5300001e54, + 0x1e5500001e56, + 0x1e5700001e58, + 0x1e5900001e5a, + 0x1e5b00001e5c, + 0x1e5d00001e5e, + 0x1e5f00001e60, + 0x1e6100001e62, + 0x1e6300001e64, + 0x1e6500001e66, + 0x1e6700001e68, + 0x1e6900001e6a, + 0x1e6b00001e6c, + 0x1e6d00001e6e, + 0x1e6f00001e70, + 0x1e7100001e72, + 0x1e7300001e74, + 0x1e7500001e76, + 0x1e7700001e78, + 0x1e7900001e7a, + 0x1e7b00001e7c, + 0x1e7d00001e7e, + 0x1e7f00001e80, + 0x1e8100001e82, + 0x1e8300001e84, + 0x1e8500001e86, + 0x1e8700001e88, + 0x1e8900001e8a, + 0x1e8b00001e8c, + 0x1e8d00001e8e, + 0x1e8f00001e90, + 0x1e9100001e92, + 0x1e9300001e94, + 0x1e9500001e9a, + 0x1e9c00001e9e, + 0x1e9f00001ea0, + 0x1ea100001ea2, + 0x1ea300001ea4, + 0x1ea500001ea6, + 0x1ea700001ea8, + 0x1ea900001eaa, + 0x1eab00001eac, + 0x1ead00001eae, + 0x1eaf00001eb0, + 0x1eb100001eb2, + 0x1eb300001eb4, + 0x1eb500001eb6, + 0x1eb700001eb8, + 0x1eb900001eba, + 0x1ebb00001ebc, + 0x1ebd00001ebe, + 0x1ebf00001ec0, + 0x1ec100001ec2, + 0x1ec300001ec4, + 0x1ec500001ec6, + 0x1ec700001ec8, + 0x1ec900001eca, + 0x1ecb00001ecc, + 
0x1ecd00001ece, + 0x1ecf00001ed0, + 0x1ed100001ed2, + 0x1ed300001ed4, + 0x1ed500001ed6, + 0x1ed700001ed8, + 0x1ed900001eda, + 0x1edb00001edc, + 0x1edd00001ede, + 0x1edf00001ee0, + 0x1ee100001ee2, + 0x1ee300001ee4, + 0x1ee500001ee6, + 0x1ee700001ee8, + 0x1ee900001eea, + 0x1eeb00001eec, + 0x1eed00001eee, + 0x1eef00001ef0, + 0x1ef100001ef2, + 0x1ef300001ef4, + 0x1ef500001ef6, + 0x1ef700001ef8, + 0x1ef900001efa, + 0x1efb00001efc, + 0x1efd00001efe, + 0x1eff00001f08, + 0x1f1000001f16, + 0x1f2000001f28, + 0x1f3000001f38, + 0x1f4000001f46, + 0x1f5000001f58, + 0x1f6000001f68, + 0x1f7000001f71, + 0x1f7200001f73, + 0x1f7400001f75, + 0x1f7600001f77, + 0x1f7800001f79, + 0x1f7a00001f7b, + 0x1f7c00001f7d, + 0x1fb000001fb2, + 0x1fb600001fb7, + 0x1fc600001fc7, + 0x1fd000001fd3, + 0x1fd600001fd8, + 0x1fe000001fe3, + 0x1fe400001fe8, + 0x1ff600001ff7, + 0x214e0000214f, + 0x218400002185, + 0x2c3000002c60, + 0x2c6100002c62, + 0x2c6500002c67, + 0x2c6800002c69, + 0x2c6a00002c6b, + 0x2c6c00002c6d, + 0x2c7100002c72, + 0x2c7300002c75, + 0x2c7600002c7c, + 0x2c8100002c82, + 0x2c8300002c84, + 0x2c8500002c86, + 0x2c8700002c88, + 0x2c8900002c8a, + 0x2c8b00002c8c, + 0x2c8d00002c8e, + 0x2c8f00002c90, + 0x2c9100002c92, + 0x2c9300002c94, + 0x2c9500002c96, + 0x2c9700002c98, + 0x2c9900002c9a, + 0x2c9b00002c9c, + 0x2c9d00002c9e, + 0x2c9f00002ca0, + 0x2ca100002ca2, + 0x2ca300002ca4, + 0x2ca500002ca6, + 0x2ca700002ca8, + 0x2ca900002caa, + 0x2cab00002cac, + 0x2cad00002cae, + 0x2caf00002cb0, + 0x2cb100002cb2, + 0x2cb300002cb4, + 0x2cb500002cb6, + 0x2cb700002cb8, + 0x2cb900002cba, + 0x2cbb00002cbc, + 0x2cbd00002cbe, + 0x2cbf00002cc0, + 0x2cc100002cc2, + 0x2cc300002cc4, + 0x2cc500002cc6, + 0x2cc700002cc8, + 0x2cc900002cca, + 0x2ccb00002ccc, + 0x2ccd00002cce, + 0x2ccf00002cd0, + 0x2cd100002cd2, + 0x2cd300002cd4, + 0x2cd500002cd6, + 0x2cd700002cd8, + 0x2cd900002cda, + 0x2cdb00002cdc, + 0x2cdd00002cde, + 0x2cdf00002ce0, + 0x2ce100002ce2, + 0x2ce300002ce5, + 0x2cec00002ced, + 0x2cee00002cf2, + 0x2cf300002cf4, + 0x2d0000002d26, + 0x2d2700002d28, + 0x2d2d00002d2e, + 0x2d3000002d68, + 0x2d7f00002d97, + 0x2da000002da7, + 0x2da800002daf, + 0x2db000002db7, + 0x2db800002dbf, + 0x2dc000002dc7, + 0x2dc800002dcf, + 0x2dd000002dd7, + 0x2dd800002ddf, + 0x2de000002e00, + 0x2e2f00002e30, + 0x300500003008, + 0x302a0000302e, + 0x303c0000303d, + 0x304100003097, + 0x30990000309b, + 0x309d0000309f, + 0x30a1000030fb, + 0x30fc000030ff, + 0x310500003130, + 0x31a0000031c0, + 0x31f000003200, + 0x340000004dc0, + 0x4e000000a48d, + 0xa4d00000a4fe, + 0xa5000000a60d, + 0xa6100000a62c, + 0xa6410000a642, + 0xa6430000a644, + 0xa6450000a646, + 0xa6470000a648, + 0xa6490000a64a, + 0xa64b0000a64c, + 0xa64d0000a64e, + 0xa64f0000a650, + 0xa6510000a652, + 0xa6530000a654, + 0xa6550000a656, + 0xa6570000a658, + 0xa6590000a65a, + 0xa65b0000a65c, + 0xa65d0000a65e, + 0xa65f0000a660, + 0xa6610000a662, + 0xa6630000a664, + 0xa6650000a666, + 0xa6670000a668, + 0xa6690000a66a, + 0xa66b0000a66c, + 0xa66d0000a670, + 0xa6740000a67e, + 0xa67f0000a680, + 0xa6810000a682, + 0xa6830000a684, + 0xa6850000a686, + 0xa6870000a688, + 0xa6890000a68a, + 0xa68b0000a68c, + 0xa68d0000a68e, + 0xa68f0000a690, + 0xa6910000a692, + 0xa6930000a694, + 0xa6950000a696, + 0xa6970000a698, + 0xa6990000a69a, + 0xa69b0000a69c, + 0xa69e0000a6e6, + 0xa6f00000a6f2, + 0xa7170000a720, + 0xa7230000a724, + 0xa7250000a726, + 0xa7270000a728, + 0xa7290000a72a, + 0xa72b0000a72c, + 0xa72d0000a72e, + 0xa72f0000a732, + 0xa7330000a734, + 0xa7350000a736, + 0xa7370000a738, + 0xa7390000a73a, + 0xa73b0000a73c, + 0xa73d0000a73e, + 
0xa73f0000a740, + 0xa7410000a742, + 0xa7430000a744, + 0xa7450000a746, + 0xa7470000a748, + 0xa7490000a74a, + 0xa74b0000a74c, + 0xa74d0000a74e, + 0xa74f0000a750, + 0xa7510000a752, + 0xa7530000a754, + 0xa7550000a756, + 0xa7570000a758, + 0xa7590000a75a, + 0xa75b0000a75c, + 0xa75d0000a75e, + 0xa75f0000a760, + 0xa7610000a762, + 0xa7630000a764, + 0xa7650000a766, + 0xa7670000a768, + 0xa7690000a76a, + 0xa76b0000a76c, + 0xa76d0000a76e, + 0xa76f0000a770, + 0xa7710000a779, + 0xa77a0000a77b, + 0xa77c0000a77d, + 0xa77f0000a780, + 0xa7810000a782, + 0xa7830000a784, + 0xa7850000a786, + 0xa7870000a789, + 0xa78c0000a78d, + 0xa78e0000a790, + 0xa7910000a792, + 0xa7930000a796, + 0xa7970000a798, + 0xa7990000a79a, + 0xa79b0000a79c, + 0xa79d0000a79e, + 0xa79f0000a7a0, + 0xa7a10000a7a2, + 0xa7a30000a7a4, + 0xa7a50000a7a6, + 0xa7a70000a7a8, + 0xa7a90000a7aa, + 0xa7af0000a7b0, + 0xa7b50000a7b6, + 0xa7b70000a7b8, + 0xa7b90000a7ba, + 0xa7bb0000a7bc, + 0xa7bd0000a7be, + 0xa7bf0000a7c0, + 0xa7c10000a7c2, + 0xa7c30000a7c4, + 0xa7c80000a7c9, + 0xa7ca0000a7cb, + 0xa7d10000a7d2, + 0xa7d30000a7d4, + 0xa7d50000a7d6, + 0xa7d70000a7d8, + 0xa7d90000a7da, + 0xa7f20000a7f5, + 0xa7f60000a7f8, + 0xa7fa0000a828, + 0xa82c0000a82d, + 0xa8400000a874, + 0xa8800000a8c6, + 0xa8d00000a8da, + 0xa8e00000a8f8, + 0xa8fb0000a8fc, + 0xa8fd0000a92e, + 0xa9300000a954, + 0xa9800000a9c1, + 0xa9cf0000a9da, + 0xa9e00000a9ff, + 0xaa000000aa37, + 0xaa400000aa4e, + 0xaa500000aa5a, + 0xaa600000aa77, + 0xaa7a0000aac3, + 0xaadb0000aade, + 0xaae00000aaf0, + 0xaaf20000aaf7, + 0xab010000ab07, + 0xab090000ab0f, + 0xab110000ab17, + 0xab200000ab27, + 0xab280000ab2f, + 0xab300000ab5b, + 0xab600000ab6a, + 0xabc00000abeb, + 0xabec0000abee, + 0xabf00000abfa, + 0xac000000d7a4, + 0xfa0e0000fa10, + 0xfa110000fa12, + 0xfa130000fa15, + 0xfa1f0000fa20, + 0xfa210000fa22, + 0xfa230000fa25, + 0xfa270000fa2a, + 0xfb1e0000fb1f, + 0xfe200000fe30, + 0xfe730000fe74, + 0x100000001000c, + 0x1000d00010027, + 0x100280001003b, + 0x1003c0001003e, + 0x1003f0001004e, + 0x100500001005e, + 0x10080000100fb, + 0x101fd000101fe, + 0x102800001029d, + 0x102a0000102d1, + 0x102e0000102e1, + 0x1030000010320, + 0x1032d00010341, + 0x103420001034a, + 0x103500001037b, + 0x103800001039e, + 0x103a0000103c4, + 0x103c8000103d0, + 0x104280001049e, + 0x104a0000104aa, + 0x104d8000104fc, + 0x1050000010528, + 0x1053000010564, + 0x10597000105a2, + 0x105a3000105b2, + 0x105b3000105ba, + 0x105bb000105bd, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1078000010786, + 0x10787000107b1, + 0x107b2000107bb, + 0x1080000010806, + 0x1080800010809, + 0x1080a00010836, + 0x1083700010839, + 0x1083c0001083d, + 0x1083f00010856, + 0x1086000010877, + 0x108800001089f, + 0x108e0000108f3, + 0x108f4000108f6, + 0x1090000010916, + 0x109200001093a, + 0x10980000109b8, + 0x109be000109c0, + 0x10a0000010a04, + 0x10a0500010a07, + 0x10a0c00010a14, + 0x10a1500010a18, + 0x10a1900010a36, + 0x10a3800010a3b, + 0x10a3f00010a40, + 0x10a6000010a7d, + 0x10a8000010a9d, + 0x10ac000010ac8, + 0x10ac900010ae7, + 0x10b0000010b36, + 0x10b4000010b56, + 0x10b6000010b73, + 0x10b8000010b92, + 0x10c0000010c49, + 0x10cc000010cf3, + 0x10d0000010d28, + 0x10d3000010d3a, + 0x10e8000010eaa, + 0x10eab00010ead, + 0x10eb000010eb2, + 0x10f0000010f1d, + 0x10f2700010f28, + 0x10f3000010f51, + 0x10f7000010f86, + 0x10fb000010fc5, + 0x10fe000010ff7, + 0x1100000011047, + 0x1106600011076, + 0x1107f000110bb, + 0x110c2000110c3, + 0x110d0000110e9, + 0x110f0000110fa, + 0x1110000011135, + 0x1113600011140, + 0x1114400011148, + 0x1115000011174, + 0x1117600011177, + 
0x11180000111c5, + 0x111c9000111cd, + 0x111ce000111db, + 0x111dc000111dd, + 0x1120000011212, + 0x1121300011238, + 0x1123e0001123f, + 0x1128000011287, + 0x1128800011289, + 0x1128a0001128e, + 0x1128f0001129e, + 0x1129f000112a9, + 0x112b0000112eb, + 0x112f0000112fa, + 0x1130000011304, + 0x113050001130d, + 0x1130f00011311, + 0x1131300011329, + 0x1132a00011331, + 0x1133200011334, + 0x113350001133a, + 0x1133b00011345, + 0x1134700011349, + 0x1134b0001134e, + 0x1135000011351, + 0x1135700011358, + 0x1135d00011364, + 0x113660001136d, + 0x1137000011375, + 0x114000001144b, + 0x114500001145a, + 0x1145e00011462, + 0x11480000114c6, + 0x114c7000114c8, + 0x114d0000114da, + 0x11580000115b6, + 0x115b8000115c1, + 0x115d8000115de, + 0x1160000011641, + 0x1164400011645, + 0x116500001165a, + 0x11680000116b9, + 0x116c0000116ca, + 0x117000001171b, + 0x1171d0001172c, + 0x117300001173a, + 0x1174000011747, + 0x118000001183b, + 0x118c0000118ea, + 0x118ff00011907, + 0x119090001190a, + 0x1190c00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193b00011944, + 0x119500001195a, + 0x119a0000119a8, + 0x119aa000119d8, + 0x119da000119e2, + 0x119e3000119e5, + 0x11a0000011a3f, + 0x11a4700011a48, + 0x11a5000011a9a, + 0x11a9d00011a9e, + 0x11ab000011af9, + 0x11c0000011c09, + 0x11c0a00011c37, + 0x11c3800011c41, + 0x11c5000011c5a, + 0x11c7200011c90, + 0x11c9200011ca8, + 0x11ca900011cb7, + 0x11d0000011d07, + 0x11d0800011d0a, + 0x11d0b00011d37, + 0x11d3a00011d3b, + 0x11d3c00011d3e, + 0x11d3f00011d48, + 0x11d5000011d5a, + 0x11d6000011d66, + 0x11d6700011d69, + 0x11d6a00011d8f, + 0x11d9000011d92, + 0x11d9300011d99, + 0x11da000011daa, + 0x11ee000011ef7, + 0x11fb000011fb1, + 0x120000001239a, + 0x1248000012544, + 0x12f9000012ff1, + 0x130000001342f, + 0x1440000014647, + 0x1680000016a39, + 0x16a4000016a5f, + 0x16a6000016a6a, + 0x16a7000016abf, + 0x16ac000016aca, + 0x16ad000016aee, + 0x16af000016af5, + 0x16b0000016b37, + 0x16b4000016b44, + 0x16b5000016b5a, + 0x16b6300016b78, + 0x16b7d00016b90, + 0x16e6000016e80, + 0x16f0000016f4b, + 0x16f4f00016f88, + 0x16f8f00016fa0, + 0x16fe000016fe2, + 0x16fe300016fe5, + 0x16ff000016ff2, + 0x17000000187f8, + 0x1880000018cd6, + 0x18d0000018d09, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b123, + 0x1b1500001b153, + 0x1b1640001b168, + 0x1b1700001b2fc, + 0x1bc000001bc6b, + 0x1bc700001bc7d, + 0x1bc800001bc89, + 0x1bc900001bc9a, + 0x1bc9d0001bc9f, + 0x1cf000001cf2e, + 0x1cf300001cf47, + 0x1da000001da37, + 0x1da3b0001da6d, + 0x1da750001da76, + 0x1da840001da85, + 0x1da9b0001daa0, + 0x1daa10001dab0, + 0x1df000001df1f, + 0x1e0000001e007, + 0x1e0080001e019, + 0x1e01b0001e022, + 0x1e0230001e025, + 0x1e0260001e02b, + 0x1e1000001e12d, + 0x1e1300001e13e, + 0x1e1400001e14a, + 0x1e14e0001e14f, + 0x1e2900001e2af, + 0x1e2c00001e2fa, + 0x1e7e00001e7e7, + 0x1e7e80001e7ec, + 0x1e7ed0001e7ef, + 0x1e7f00001e7ff, + 0x1e8000001e8c5, + 0x1e8d00001e8d7, + 0x1e9220001e94c, + 0x1e9500001e95a, + 0x1fbf00001fbfa, + 0x200000002a6e0, + 0x2a7000002b739, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x300000003134b, + ), + 'CONTEXTJ': ( + 0x200c0000200e, + ), + 'CONTEXTO': ( + 0xb7000000b8, + 0x37500000376, + 0x5f3000005f5, + 0x6600000066a, + 0x6f0000006fa, + 0x30fb000030fc, + ), +} diff --git a/venv/lib/python3.10/site-packages/idna/intranges.py b/venv/lib/python3.10/site-packages/idna/intranges.py new file mode 100644 index 0000000..6a43b04 --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/intranges.py @@ -0,0 +1,54 @@ +""" +Given a list of integers, made up of (hopefully) a 
small number of long runs
+of consecutive integers, compute a representation of the form
+((start1, end1), (start2, end2) ...). Then answer the question "was x present
+in the original list?" in time O(log(# runs)).
+"""
+
+import bisect
+from typing import List, Tuple
+
+def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
+    """Represent a list of integers as a sequence of ranges:
+    ((start_0, end_0), (start_1, end_1), ...), such that the original
+    integers are exactly those x such that start_i <= x < end_i for some i.
+
+    Ranges are encoded as single integers (start << 32 | end), not as tuples.
+    """
+
+    sorted_list = sorted(list_)
+    ranges = []
+    last_write = -1
+    for i in range(len(sorted_list)):
+        if i+1 < len(sorted_list):
+            if sorted_list[i] == sorted_list[i+1]-1:
+                continue
+        current_range = sorted_list[last_write+1:i+1]
+        ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
+        last_write = i
+
+    return tuple(ranges)
+
+def _encode_range(start: int, end: int) -> int:
+    return (start << 32) | end
+
+def _decode_range(r: int) -> Tuple[int, int]:
+    return (r >> 32), (r & ((1 << 32) - 1))
+
+
+def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
+    """Determine if `int_` falls into one of the ranges in `ranges`."""
+    tuple_ = _encode_range(int_, 0)
+    pos = bisect.bisect_left(ranges, tuple_)
+    # we could be immediately ahead of a tuple (start, end)
+    # with start < int_ <= end
+    if pos > 0:
+        left, right = _decode_range(ranges[pos-1])
+        if left <= int_ < right:
+            return True
+    # or we could be immediately behind a tuple (int_, end)
+    if pos < len(ranges):
+        left, _ = _decode_range(ranges[pos])
+        if left == int_:
+            return True
+    return False
diff --git a/venv/lib/python3.10/site-packages/idna/package_data.py b/venv/lib/python3.10/site-packages/idna/package_data.py
new file mode 100644
index 0000000..f5ea87c
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/idna/package_data.py
@@ -0,0 +1,2 @@
+__version__ = '3.3'
+
diff --git a/venv/lib/python3.10/site-packages/idna/py.typed b/venv/lib/python3.10/site-packages/idna/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.10/site-packages/idna/uts46data.py b/venv/lib/python3.10/site-packages/idna/uts46data.py
new file mode 100644
index 0000000..8f65705
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/idna/uts46data.py
@@ -0,0 +1,8512 @@
+# This file is automatically generated by tools/idna-data
+# vim: set fileencoding=utf-8 :
+
+from typing import List, Tuple, Union
+
+
+"""IDNA Mapping Table from UTS46."""
+
+
+__version__ = '14.0.0'
+def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+    (0x0, '3'),
+    (0x1, '3'),
+    (0x2, '3'),
+    (0x3, '3'),
+    (0x4, '3'),
+    (0x5, '3'),
+    (0x6, '3'),
+    (0x7, '3'),
+    (0x8, '3'),
+    (0x9, '3'),
+    (0xA, '3'),
+    (0xB, '3'),
+    (0xC, '3'),
+    (0xD, '3'),
+    (0xE, '3'),
+    (0xF, '3'),
+    (0x10, '3'),
+    (0x11, '3'),
+    (0x12, '3'),
+    (0x13, '3'),
+    (0x14, '3'),
+    (0x15, '3'),
+    (0x16, '3'),
+    (0x17, '3'),
+    (0x18, '3'),
+    (0x19, '3'),
+    (0x1A, '3'),
+    (0x1B, '3'),
+    (0x1C, '3'),
+    (0x1D, '3'),
+    (0x1E, '3'),
+    (0x1F, '3'),
+    (0x20, '3'),
+    (0x21, '3'),
+    (0x22, '3'),
+    (0x23, '3'),
+    (0x24, '3'),
+    (0x25, '3'),
+    (0x26, '3'),
+    (0x27, '3'),
+    (0x28, '3'),
+    (0x29, '3'),
+    (0x2A, '3'),
+    (0x2B, '3'),
+    (0x2C, '3'),
+    (0x2D, 'V'),
+    (0x2E, 'V'),
+    (0x2F, '3'),
+    (0x30, 'V'),
+    (0x31, 'V'),
+    (0x32, 'V'),
+    (0x33, 'V'),
+    (0x34, 'V'),
+    (0x35, 'V'),
+    (0x36, 'V'),
+    (0x37, 'V'),
+    (0x38, 'V'),
+    (0x39, 'V'),
+    (0x3A, '3'),
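# A quick sketch of how the intranges helpers above behave, written as
# comments so the generated table stays intact; this assumes the module
# imports as idna.intranges:
#
#     ranges = intranges_from_list([1, 2, 3, 10, 11])
#     # two packed runs: ((1 << 32) | 4, (10 << 32) | 12), i.e. [1, 4) and [10, 12)
#     intranges_contain(2, ranges)    # True:  1 <= 2 < 4
#     intranges_contain(5, ranges)    # False: falls between the two runs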
+ (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', 'a'), + (0x42, 'M', 'b'), + (0x43, 'M', 'c'), + (0x44, 'M', 'd'), + (0x45, 'M', 'e'), + (0x46, 'M', 'f'), + (0x47, 'M', 'g'), + (0x48, 'M', 'h'), + (0x49, 'M', 'i'), + (0x4A, 'M', 'j'), + (0x4B, 'M', 'k'), + (0x4C, 'M', 'l'), + (0x4D, 'M', 'm'), + (0x4E, 'M', 'n'), + (0x4F, 'M', 'o'), + (0x50, 'M', 'p'), + (0x51, 'M', 'q'), + (0x52, 'M', 'r'), + (0x53, 'M', 's'), + (0x54, 'M', 't'), + (0x55, 'M', 'u'), + (0x56, 'M', 'v'), + (0x57, 'M', 'w'), + (0x58, 'M', 'x'), + (0x59, 'M', 'y'), + (0x5A, 'M', 'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + ] + +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', ' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', ' ̈'), + (0xA9, 'V'), + (0xAA, 'M', 'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', ' ̄'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', '2'), + (0xB3, 'M', '3'), + (0xB4, '3', ' ́'), + (0xB5, 'M', 'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', ' ̧'), + (0xB9, 'M', '1'), + (0xBA, 'M', 'o'), + (0xBB, 'V'), + (0xBC, 'M', '1⁄4'), + (0xBD, 'M', '1⁄2'), + (0xBE, 'M', '3⁄4'), + (0xBF, 'V'), + (0xC0, 'M', 'à'), + (0xC1, 'M', 'á'), + (0xC2, 'M', 'â'), + (0xC3, 'M', 'ã'), + (0xC4, 'M', 'ä'), + (0xC5, 'M', 'å'), + (0xC6, 'M', 'æ'), + (0xC7, 'M', 'ç'), + ] + +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC8, 'M', 'è'), + (0xC9, 'M', 'é'), + (0xCA, 'M', 'ê'), + (0xCB, 'M', 'ë'), + (0xCC, 'M', 'ì'), + (0xCD, 'M', 'í'), + (0xCE, 'M', 'î'), + (0xCF, 'M', 'ï'), + (0xD0, 'M', 'ð'), + (0xD1, 'M', 'ñ'), + (0xD2, 'M', 'ò'), + (0xD3, 'M', 'ó'), + (0xD4, 'M', 'ô'), + (0xD5, 'M', 'õ'), + (0xD6, 'M', 'ö'), + (0xD7, 'V'), + (0xD8, 'M', 'ø'), + (0xD9, 'M', 'ù'), + (0xDA, 'M', 'ú'), + (0xDB, 'M', 'û'), + (0xDC, 'M', 'ü'), + (0xDD, 'M', 'ý'), + (0xDE, 'M', 'þ'), + (0xDF, 'D', 'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', 'ā'), + (0x101, 'V'), + (0x102, 'M', 'ă'), + 
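# Annotation (not part of the generated file): the second element of each
# tuple is the UTS #46 status for the codepoint: 'V' valid, 'M' mapped (the
# third element holds the replacement), 'D' deviation, 'I' ignored, 'X'
# disallowed, '3' disallowed_STD3_valid (or the STD3-mapped case when a
# replacement is present). These are exactly the letters uts46_remap() in
# core.py dispatches on. Rows for codepoints 0..255 appear one per codepoint
# so the lookup indexes them directly; higher codepoints are found by
# bisection.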
(0x103, 'V'), + (0x104, 'M', 'ą'), + (0x105, 'V'), + (0x106, 'M', 'ć'), + (0x107, 'V'), + (0x108, 'M', 'ĉ'), + (0x109, 'V'), + (0x10A, 'M', 'ċ'), + (0x10B, 'V'), + (0x10C, 'M', 'č'), + (0x10D, 'V'), + (0x10E, 'M', 'ď'), + (0x10F, 'V'), + (0x110, 'M', 'đ'), + (0x111, 'V'), + (0x112, 'M', 'ē'), + (0x113, 'V'), + (0x114, 'M', 'ĕ'), + (0x115, 'V'), + (0x116, 'M', 'ė'), + (0x117, 'V'), + (0x118, 'M', 'ę'), + (0x119, 'V'), + (0x11A, 'M', 'ě'), + (0x11B, 'V'), + (0x11C, 'M', 'ĝ'), + (0x11D, 'V'), + (0x11E, 'M', 'ğ'), + (0x11F, 'V'), + (0x120, 'M', 'ġ'), + (0x121, 'V'), + (0x122, 'M', 'ģ'), + (0x123, 'V'), + (0x124, 'M', 'ĥ'), + (0x125, 'V'), + (0x126, 'M', 'ħ'), + (0x127, 'V'), + (0x128, 'M', 'ĩ'), + (0x129, 'V'), + (0x12A, 'M', 'ī'), + (0x12B, 'V'), + ] + +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x12C, 'M', 'ĭ'), + (0x12D, 'V'), + (0x12E, 'M', 'į'), + (0x12F, 'V'), + (0x130, 'M', 'i̇'), + (0x131, 'V'), + (0x132, 'M', 'ij'), + (0x134, 'M', 'ĵ'), + (0x135, 'V'), + (0x136, 'M', 'ķ'), + (0x137, 'V'), + (0x139, 'M', 'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', 'ļ'), + (0x13C, 'V'), + (0x13D, 'M', 'ľ'), + (0x13E, 'V'), + (0x13F, 'M', 'l·'), + (0x141, 'M', 'ł'), + (0x142, 'V'), + (0x143, 'M', 'ń'), + (0x144, 'V'), + (0x145, 'M', 'ņ'), + (0x146, 'V'), + (0x147, 'M', 'ň'), + (0x148, 'V'), + (0x149, 'M', 'ʼn'), + (0x14A, 'M', 'ŋ'), + (0x14B, 'V'), + (0x14C, 'M', 'ō'), + (0x14D, 'V'), + (0x14E, 'M', 'ŏ'), + (0x14F, 'V'), + (0x150, 'M', 'ő'), + (0x151, 'V'), + (0x152, 'M', 'œ'), + (0x153, 'V'), + (0x154, 'M', 'ŕ'), + (0x155, 'V'), + (0x156, 'M', 'ŗ'), + (0x157, 'V'), + (0x158, 'M', 'ř'), + (0x159, 'V'), + (0x15A, 'M', 'ś'), + (0x15B, 'V'), + (0x15C, 'M', 'ŝ'), + (0x15D, 'V'), + (0x15E, 'M', 'ş'), + (0x15F, 'V'), + (0x160, 'M', 'š'), + (0x161, 'V'), + (0x162, 'M', 'ţ'), + (0x163, 'V'), + (0x164, 'M', 'ť'), + (0x165, 'V'), + (0x166, 'M', 'ŧ'), + (0x167, 'V'), + (0x168, 'M', 'ũ'), + (0x169, 'V'), + (0x16A, 'M', 'ū'), + (0x16B, 'V'), + (0x16C, 'M', 'ŭ'), + (0x16D, 'V'), + (0x16E, 'M', 'ů'), + (0x16F, 'V'), + (0x170, 'M', 'ű'), + (0x171, 'V'), + (0x172, 'M', 'ų'), + (0x173, 'V'), + (0x174, 'M', 'ŵ'), + (0x175, 'V'), + (0x176, 'M', 'ŷ'), + (0x177, 'V'), + (0x178, 'M', 'ÿ'), + (0x179, 'M', 'ź'), + (0x17A, 'V'), + (0x17B, 'M', 'ż'), + (0x17C, 'V'), + (0x17D, 'M', 'ž'), + (0x17E, 'V'), + (0x17F, 'M', 's'), + (0x180, 'V'), + (0x181, 'M', 'ɓ'), + (0x182, 'M', 'ƃ'), + (0x183, 'V'), + (0x184, 'M', 'ƅ'), + (0x185, 'V'), + (0x186, 'M', 'ɔ'), + (0x187, 'M', 'ƈ'), + (0x188, 'V'), + (0x189, 'M', 'ɖ'), + (0x18A, 'M', 'ɗ'), + (0x18B, 'M', 'ƌ'), + (0x18C, 'V'), + (0x18E, 'M', 'ǝ'), + (0x18F, 'M', 'ə'), + (0x190, 'M', 'ɛ'), + (0x191, 'M', 'ƒ'), + (0x192, 'V'), + (0x193, 'M', 'ɠ'), + ] + +def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x194, 'M', 'ɣ'), + (0x195, 'V'), + (0x196, 'M', 'ɩ'), + (0x197, 'M', 'ɨ'), + (0x198, 'M', 'ƙ'), + (0x199, 'V'), + (0x19C, 'M', 'ɯ'), + (0x19D, 'M', 'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', 'ɵ'), + (0x1A0, 'M', 'ơ'), + (0x1A1, 'V'), + (0x1A2, 'M', 'ƣ'), + (0x1A3, 'V'), + (0x1A4, 'M', 'ƥ'), + (0x1A5, 'V'), + (0x1A6, 'M', 'ʀ'), + (0x1A7, 'M', 'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', 'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', 'ƭ'), + (0x1AD, 'V'), + (0x1AE, 'M', 'ʈ'), + (0x1AF, 'M', 'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', 'ʊ'), + (0x1B2, 'M', 'ʋ'), + (0x1B3, 'M', 'ƴ'), + (0x1B4, 'V'), + (0x1B5, 'M', 'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', 'ʒ'), + (0x1B8, 'M', 'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', 'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', 'dž'), + (0x1C7, 'M', 
'lj'), + (0x1CA, 'M', 'nj'), + (0x1CD, 'M', 'ǎ'), + (0x1CE, 'V'), + (0x1CF, 'M', 'ǐ'), + (0x1D0, 'V'), + (0x1D1, 'M', 'ǒ'), + (0x1D2, 'V'), + (0x1D3, 'M', 'ǔ'), + (0x1D4, 'V'), + (0x1D5, 'M', 'ǖ'), + (0x1D6, 'V'), + (0x1D7, 'M', 'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', 'ǚ'), + (0x1DA, 'V'), + (0x1DB, 'M', 'ǜ'), + (0x1DC, 'V'), + (0x1DE, 'M', 'ǟ'), + (0x1DF, 'V'), + (0x1E0, 'M', 'ǡ'), + (0x1E1, 'V'), + (0x1E2, 'M', 'ǣ'), + (0x1E3, 'V'), + (0x1E4, 'M', 'ǥ'), + (0x1E5, 'V'), + (0x1E6, 'M', 'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', 'ǩ'), + (0x1E9, 'V'), + (0x1EA, 'M', 'ǫ'), + (0x1EB, 'V'), + (0x1EC, 'M', 'ǭ'), + (0x1ED, 'V'), + (0x1EE, 'M', 'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', 'dz'), + (0x1F4, 'M', 'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', 'ƕ'), + (0x1F7, 'M', 'ƿ'), + (0x1F8, 'M', 'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', 'ǻ'), + (0x1FB, 'V'), + (0x1FC, 'M', 'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', 'ǿ'), + (0x1FF, 'V'), + (0x200, 'M', 'ȁ'), + (0x201, 'V'), + (0x202, 'M', 'ȃ'), + (0x203, 'V'), + (0x204, 'M', 'ȅ'), + (0x205, 'V'), + (0x206, 'M', 'ȇ'), + (0x207, 'V'), + (0x208, 'M', 'ȉ'), + (0x209, 'V'), + (0x20A, 'M', 'ȋ'), + (0x20B, 'V'), + (0x20C, 'M', 'ȍ'), + ] + +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x20D, 'V'), + (0x20E, 'M', 'ȏ'), + (0x20F, 'V'), + (0x210, 'M', 'ȑ'), + (0x211, 'V'), + (0x212, 'M', 'ȓ'), + (0x213, 'V'), + (0x214, 'M', 'ȕ'), + (0x215, 'V'), + (0x216, 'M', 'ȗ'), + (0x217, 'V'), + (0x218, 'M', 'ș'), + (0x219, 'V'), + (0x21A, 'M', 'ț'), + (0x21B, 'V'), + (0x21C, 'M', 'ȝ'), + (0x21D, 'V'), + (0x21E, 'M', 'ȟ'), + (0x21F, 'V'), + (0x220, 'M', 'ƞ'), + (0x221, 'V'), + (0x222, 'M', 'ȣ'), + (0x223, 'V'), + (0x224, 'M', 'ȥ'), + (0x225, 'V'), + (0x226, 'M', 'ȧ'), + (0x227, 'V'), + (0x228, 'M', 'ȩ'), + (0x229, 'V'), + (0x22A, 'M', 'ȫ'), + (0x22B, 'V'), + (0x22C, 'M', 'ȭ'), + (0x22D, 'V'), + (0x22E, 'M', 'ȯ'), + (0x22F, 'V'), + (0x230, 'M', 'ȱ'), + (0x231, 'V'), + (0x232, 'M', 'ȳ'), + (0x233, 'V'), + (0x23A, 'M', 'ⱥ'), + (0x23B, 'M', 'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', 'ƚ'), + (0x23E, 'M', 'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', 'ɂ'), + (0x242, 'V'), + (0x243, 'M', 'ƀ'), + (0x244, 'M', 'ʉ'), + (0x245, 'M', 'ʌ'), + (0x246, 'M', 'ɇ'), + (0x247, 'V'), + (0x248, 'M', 'ɉ'), + (0x249, 'V'), + (0x24A, 'M', 'ɋ'), + (0x24B, 'V'), + (0x24C, 'M', 'ɍ'), + (0x24D, 'V'), + (0x24E, 'M', 'ɏ'), + (0x24F, 'V'), + (0x2B0, 'M', 'h'), + (0x2B1, 'M', 'ɦ'), + (0x2B2, 'M', 'j'), + (0x2B3, 'M', 'r'), + (0x2B4, 'M', 'ɹ'), + (0x2B5, 'M', 'ɻ'), + (0x2B6, 'M', 'ʁ'), + (0x2B7, 'M', 'w'), + (0x2B8, 'M', 'y'), + (0x2B9, 'V'), + (0x2D8, '3', ' ̆'), + (0x2D9, '3', ' ̇'), + (0x2DA, '3', ' ̊'), + (0x2DB, '3', ' ̨'), + (0x2DC, '3', ' ̃'), + (0x2DD, '3', ' ̋'), + (0x2DE, 'V'), + (0x2E0, 'M', 'ɣ'), + (0x2E1, 'M', 'l'), + (0x2E2, 'M', 's'), + (0x2E3, 'M', 'x'), + (0x2E4, 'M', 'ʕ'), + (0x2E5, 'V'), + (0x340, 'M', '̀'), + (0x341, 'M', '́'), + (0x342, 'V'), + (0x343, 'M', '̓'), + (0x344, 'M', '̈́'), + (0x345, 'M', 'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', 'ͱ'), + (0x371, 'V'), + (0x372, 'M', 'ͳ'), + (0x373, 'V'), + (0x374, 'M', 'ʹ'), + (0x375, 'V'), + (0x376, 'M', 'ͷ'), + (0x377, 'V'), + ] + +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x378, 'X'), + (0x37A, '3', ' ι'), + (0x37B, 'V'), + (0x37E, '3', ';'), + (0x37F, 'M', 'ϳ'), + (0x380, 'X'), + (0x384, '3', ' ́'), + (0x385, '3', ' ̈́'), + (0x386, 'M', 'ά'), + (0x387, 'M', '·'), + (0x388, 'M', 'έ'), + (0x389, 'M', 'ή'), + (0x38A, 'M', 'ί'), + (0x38B, 'X'), + (0x38C, 'M', 'ό'), + (0x38D, 'X'), + 
(0x38E, 'M', 'ύ'), + (0x38F, 'M', 'ώ'), + (0x390, 'V'), + (0x391, 'M', 'α'), + (0x392, 'M', 'β'), + (0x393, 'M', 'γ'), + (0x394, 'M', 'δ'), + (0x395, 'M', 'ε'), + (0x396, 'M', 'ζ'), + (0x397, 'M', 'η'), + (0x398, 'M', 'θ'), + (0x399, 'M', 'ι'), + (0x39A, 'M', 'κ'), + (0x39B, 'M', 'λ'), + (0x39C, 'M', 'μ'), + (0x39D, 'M', 'ν'), + (0x39E, 'M', 'ξ'), + (0x39F, 'M', 'ο'), + (0x3A0, 'M', 'π'), + (0x3A1, 'M', 'ρ'), + (0x3A2, 'X'), + (0x3A3, 'M', 'σ'), + (0x3A4, 'M', 'τ'), + (0x3A5, 'M', 'υ'), + (0x3A6, 'M', 'φ'), + (0x3A7, 'M', 'χ'), + (0x3A8, 'M', 'ψ'), + (0x3A9, 'M', 'ω'), + (0x3AA, 'M', 'ϊ'), + (0x3AB, 'M', 'ϋ'), + (0x3AC, 'V'), + (0x3C2, 'D', 'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', 'ϗ'), + (0x3D0, 'M', 'β'), + (0x3D1, 'M', 'θ'), + (0x3D2, 'M', 'υ'), + (0x3D3, 'M', 'ύ'), + (0x3D4, 'M', 'ϋ'), + (0x3D5, 'M', 'φ'), + (0x3D6, 'M', 'π'), + (0x3D7, 'V'), + (0x3D8, 'M', 'ϙ'), + (0x3D9, 'V'), + (0x3DA, 'M', 'ϛ'), + (0x3DB, 'V'), + (0x3DC, 'M', 'ϝ'), + (0x3DD, 'V'), + (0x3DE, 'M', 'ϟ'), + (0x3DF, 'V'), + (0x3E0, 'M', 'ϡ'), + (0x3E1, 'V'), + (0x3E2, 'M', 'ϣ'), + (0x3E3, 'V'), + (0x3E4, 'M', 'ϥ'), + (0x3E5, 'V'), + (0x3E6, 'M', 'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', 'ϩ'), + (0x3E9, 'V'), + (0x3EA, 'M', 'ϫ'), + (0x3EB, 'V'), + (0x3EC, 'M', 'ϭ'), + (0x3ED, 'V'), + (0x3EE, 'M', 'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', 'κ'), + (0x3F1, 'M', 'ρ'), + (0x3F2, 'M', 'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', 'θ'), + (0x3F5, 'M', 'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', 'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', 'σ'), + (0x3FA, 'M', 'ϻ'), + (0x3FB, 'V'), + (0x3FD, 'M', 'ͻ'), + (0x3FE, 'M', 'ͼ'), + (0x3FF, 'M', 'ͽ'), + (0x400, 'M', 'ѐ'), + (0x401, 'M', 'ё'), + (0x402, 'M', 'ђ'), + ] + +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x403, 'M', 'ѓ'), + (0x404, 'M', 'є'), + (0x405, 'M', 'ѕ'), + (0x406, 'M', 'і'), + (0x407, 'M', 'ї'), + (0x408, 'M', 'ј'), + (0x409, 'M', 'љ'), + (0x40A, 'M', 'њ'), + (0x40B, 'M', 'ћ'), + (0x40C, 'M', 'ќ'), + (0x40D, 'M', 'ѝ'), + (0x40E, 'M', 'ў'), + (0x40F, 'M', 'џ'), + (0x410, 'M', 'а'), + (0x411, 'M', 'б'), + (0x412, 'M', 'в'), + (0x413, 'M', 'г'), + (0x414, 'M', 'д'), + (0x415, 'M', 'е'), + (0x416, 'M', 'ж'), + (0x417, 'M', 'з'), + (0x418, 'M', 'и'), + (0x419, 'M', 'й'), + (0x41A, 'M', 'к'), + (0x41B, 'M', 'л'), + (0x41C, 'M', 'м'), + (0x41D, 'M', 'н'), + (0x41E, 'M', 'о'), + (0x41F, 'M', 'п'), + (0x420, 'M', 'р'), + (0x421, 'M', 'с'), + (0x422, 'M', 'т'), + (0x423, 'M', 'у'), + (0x424, 'M', 'ф'), + (0x425, 'M', 'х'), + (0x426, 'M', 'ц'), + (0x427, 'M', 'ч'), + (0x428, 'M', 'ш'), + (0x429, 'M', 'щ'), + (0x42A, 'M', 'ъ'), + (0x42B, 'M', 'ы'), + (0x42C, 'M', 'ь'), + (0x42D, 'M', 'э'), + (0x42E, 'M', 'ю'), + (0x42F, 'M', 'я'), + (0x430, 'V'), + (0x460, 'M', 'ѡ'), + (0x461, 'V'), + (0x462, 'M', 'ѣ'), + (0x463, 'V'), + (0x464, 'M', 'ѥ'), + (0x465, 'V'), + (0x466, 'M', 'ѧ'), + (0x467, 'V'), + (0x468, 'M', 'ѩ'), + (0x469, 'V'), + (0x46A, 'M', 'ѫ'), + (0x46B, 'V'), + (0x46C, 'M', 'ѭ'), + (0x46D, 'V'), + (0x46E, 'M', 'ѯ'), + (0x46F, 'V'), + (0x470, 'M', 'ѱ'), + (0x471, 'V'), + (0x472, 'M', 'ѳ'), + (0x473, 'V'), + (0x474, 'M', 'ѵ'), + (0x475, 'V'), + (0x476, 'M', 'ѷ'), + (0x477, 'V'), + (0x478, 'M', 'ѹ'), + (0x479, 'V'), + (0x47A, 'M', 'ѻ'), + (0x47B, 'V'), + (0x47C, 'M', 'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', 'ѿ'), + (0x47F, 'V'), + (0x480, 'M', 'ҁ'), + (0x481, 'V'), + (0x48A, 'M', 'ҋ'), + (0x48B, 'V'), + (0x48C, 'M', 'ҍ'), + (0x48D, 'V'), + (0x48E, 'M', 'ҏ'), + (0x48F, 'V'), + (0x490, 'M', 'ґ'), + (0x491, 'V'), + (0x492, 'M', 'ғ'), + (0x493, 'V'), + (0x494, 'M', 'ҕ'), + (0x495, 'V'), 
+ (0x496, 'M', 'җ'), + (0x497, 'V'), + (0x498, 'M', 'ҙ'), + (0x499, 'V'), + (0x49A, 'M', 'қ'), + (0x49B, 'V'), + (0x49C, 'M', 'ҝ'), + (0x49D, 'V'), + ] + +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x49E, 'M', 'ҟ'), + (0x49F, 'V'), + (0x4A0, 'M', 'ҡ'), + (0x4A1, 'V'), + (0x4A2, 'M', 'ң'), + (0x4A3, 'V'), + (0x4A4, 'M', 'ҥ'), + (0x4A5, 'V'), + (0x4A6, 'M', 'ҧ'), + (0x4A7, 'V'), + (0x4A8, 'M', 'ҩ'), + (0x4A9, 'V'), + (0x4AA, 'M', 'ҫ'), + (0x4AB, 'V'), + (0x4AC, 'M', 'ҭ'), + (0x4AD, 'V'), + (0x4AE, 'M', 'ү'), + (0x4AF, 'V'), + (0x4B0, 'M', 'ұ'), + (0x4B1, 'V'), + (0x4B2, 'M', 'ҳ'), + (0x4B3, 'V'), + (0x4B4, 'M', 'ҵ'), + (0x4B5, 'V'), + (0x4B6, 'M', 'ҷ'), + (0x4B7, 'V'), + (0x4B8, 'M', 'ҹ'), + (0x4B9, 'V'), + (0x4BA, 'M', 'һ'), + (0x4BB, 'V'), + (0x4BC, 'M', 'ҽ'), + (0x4BD, 'V'), + (0x4BE, 'M', 'ҿ'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', 'ӂ'), + (0x4C2, 'V'), + (0x4C3, 'M', 'ӄ'), + (0x4C4, 'V'), + (0x4C5, 'M', 'ӆ'), + (0x4C6, 'V'), + (0x4C7, 'M', 'ӈ'), + (0x4C8, 'V'), + (0x4C9, 'M', 'ӊ'), + (0x4CA, 'V'), + (0x4CB, 'M', 'ӌ'), + (0x4CC, 'V'), + (0x4CD, 'M', 'ӎ'), + (0x4CE, 'V'), + (0x4D0, 'M', 'ӑ'), + (0x4D1, 'V'), + (0x4D2, 'M', 'ӓ'), + (0x4D3, 'V'), + (0x4D4, 'M', 'ӕ'), + (0x4D5, 'V'), + (0x4D6, 'M', 'ӗ'), + (0x4D7, 'V'), + (0x4D8, 'M', 'ә'), + (0x4D9, 'V'), + (0x4DA, 'M', 'ӛ'), + (0x4DB, 'V'), + (0x4DC, 'M', 'ӝ'), + (0x4DD, 'V'), + (0x4DE, 'M', 'ӟ'), + (0x4DF, 'V'), + (0x4E0, 'M', 'ӡ'), + (0x4E1, 'V'), + (0x4E2, 'M', 'ӣ'), + (0x4E3, 'V'), + (0x4E4, 'M', 'ӥ'), + (0x4E5, 'V'), + (0x4E6, 'M', 'ӧ'), + (0x4E7, 'V'), + (0x4E8, 'M', 'ө'), + (0x4E9, 'V'), + (0x4EA, 'M', 'ӫ'), + (0x4EB, 'V'), + (0x4EC, 'M', 'ӭ'), + (0x4ED, 'V'), + (0x4EE, 'M', 'ӯ'), + (0x4EF, 'V'), + (0x4F0, 'M', 'ӱ'), + (0x4F1, 'V'), + (0x4F2, 'M', 'ӳ'), + (0x4F3, 'V'), + (0x4F4, 'M', 'ӵ'), + (0x4F5, 'V'), + (0x4F6, 'M', 'ӷ'), + (0x4F7, 'V'), + (0x4F8, 'M', 'ӹ'), + (0x4F9, 'V'), + (0x4FA, 'M', 'ӻ'), + (0x4FB, 'V'), + (0x4FC, 'M', 'ӽ'), + (0x4FD, 'V'), + (0x4FE, 'M', 'ӿ'), + (0x4FF, 'V'), + (0x500, 'M', 'ԁ'), + (0x501, 'V'), + (0x502, 'M', 'ԃ'), + ] + +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x503, 'V'), + (0x504, 'M', 'ԅ'), + (0x505, 'V'), + (0x506, 'M', 'ԇ'), + (0x507, 'V'), + (0x508, 'M', 'ԉ'), + (0x509, 'V'), + (0x50A, 'M', 'ԋ'), + (0x50B, 'V'), + (0x50C, 'M', 'ԍ'), + (0x50D, 'V'), + (0x50E, 'M', 'ԏ'), + (0x50F, 'V'), + (0x510, 'M', 'ԑ'), + (0x511, 'V'), + (0x512, 'M', 'ԓ'), + (0x513, 'V'), + (0x514, 'M', 'ԕ'), + (0x515, 'V'), + (0x516, 'M', 'ԗ'), + (0x517, 'V'), + (0x518, 'M', 'ԙ'), + (0x519, 'V'), + (0x51A, 'M', 'ԛ'), + (0x51B, 'V'), + (0x51C, 'M', 'ԝ'), + (0x51D, 'V'), + (0x51E, 'M', 'ԟ'), + (0x51F, 'V'), + (0x520, 'M', 'ԡ'), + (0x521, 'V'), + (0x522, 'M', 'ԣ'), + (0x523, 'V'), + (0x524, 'M', 'ԥ'), + (0x525, 'V'), + (0x526, 'M', 'ԧ'), + (0x527, 'V'), + (0x528, 'M', 'ԩ'), + (0x529, 'V'), + (0x52A, 'M', 'ԫ'), + (0x52B, 'V'), + (0x52C, 'M', 'ԭ'), + (0x52D, 'V'), + (0x52E, 'M', 'ԯ'), + (0x52F, 'V'), + (0x530, 'X'), + (0x531, 'M', 'ա'), + (0x532, 'M', 'բ'), + (0x533, 'M', 'գ'), + (0x534, 'M', 'դ'), + (0x535, 'M', 'ե'), + (0x536, 'M', 'զ'), + (0x537, 'M', 'է'), + (0x538, 'M', 'ը'), + (0x539, 'M', 'թ'), + (0x53A, 'M', 'ժ'), + (0x53B, 'M', 'ի'), + (0x53C, 'M', 'լ'), + (0x53D, 'M', 'խ'), + (0x53E, 'M', 'ծ'), + (0x53F, 'M', 'կ'), + (0x540, 'M', 'հ'), + (0x541, 'M', 'ձ'), + (0x542, 'M', 'ղ'), + (0x543, 'M', 'ճ'), + (0x544, 'M', 'մ'), + (0x545, 'M', 'յ'), + (0x546, 'M', 'ն'), + (0x547, 'M', 'շ'), + (0x548, 'M', 'ո'), + (0x549, 'M', 'չ'), + (0x54A, 
'M', 'պ'), + (0x54B, 'M', 'ջ'), + (0x54C, 'M', 'ռ'), + (0x54D, 'M', 'ս'), + (0x54E, 'M', 'վ'), + (0x54F, 'M', 'տ'), + (0x550, 'M', 'ր'), + (0x551, 'M', 'ց'), + (0x552, 'M', 'ւ'), + (0x553, 'M', 'փ'), + (0x554, 'M', 'ք'), + (0x555, 'M', 'օ'), + (0x556, 'M', 'ֆ'), + (0x557, 'X'), + (0x559, 'V'), + (0x587, 'M', 'եւ'), + (0x588, 'V'), + (0x58B, 'X'), + (0x58D, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5EF, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61D, 'V'), + ] + +def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x675, 'M', 'اٴ'), + (0x676, 'M', 'وٴ'), + (0x677, 'M', 'ۇٴ'), + (0x678, 'M', 'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x7FD, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x860, 'V'), + (0x86B, 'X'), + (0x870, 'V'), + (0x88F, 'X'), + (0x898, 'V'), + (0x8E2, 'X'), + (0x8E3, 'V'), + (0x958, 'M', 'क़'), + (0x959, 'M', 'ख़'), + (0x95A, 'M', 'ग़'), + (0x95B, 'M', 'ज़'), + (0x95C, 'M', 'ड़'), + (0x95D, 'M', 'ढ़'), + (0x95E, 'M', 'फ़'), + (0x95F, 'M', 'य़'), + (0x960, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', 'ড়'), + (0x9DD, 'M', 'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', 'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FF, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', 'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', 'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', 'ਖ਼'), + (0xA5A, 'M', 'ਗ਼'), + (0xA5B, 'M', 'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), + ] + +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA5E, 'M', 'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA77, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xAF9, 'V'), + (0xB00, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB55, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', 'ଡ଼'), + (0xB5D, 'M', 'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 
'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC00, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), + ] + +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC3A, 'X'), + (0xC3C, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5B, 'X'), + (0xC5D, 'V'), + (0xC5E, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC77, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDD, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD00, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD80, 'X'), + (0xD81, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', 'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE86, 'V'), + (0xE8B, 'X'), + (0xE8C, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEB3, 'M', 'ໍາ'), + (0xEB4, 'V'), + ] + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', 'ຫນ'), + (0xEDD, 'M', 'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', '་'), + (0xF0D, 'V'), + (0xF43, 'M', 'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', 'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', 'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', 'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', 'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', 'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', 'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', 'ཱུ'), + (0xF76, 'M', 'ྲྀ'), + (0xF77, 'M', 'ྲཱྀ'), + (0xF78, 'M', 'ླྀ'), + (0xF79, 'M', 'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', 'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', 'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', 'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', 'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', 'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', 'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', 'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', 'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', 'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', 'ნ'), + (0x10FD, 'V'), + 
(0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + ] + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F6, 'X'), + (0x13F8, 'M', 'Ᏸ'), + (0x13F9, 'M', 'Ᏹ'), + (0x13FA, 'M', 'Ᏺ'), + (0x13FB, 'M', 'Ᏻ'), + (0x13FC, 'M', 'Ᏼ'), + (0x13FD, 'M', 'Ᏽ'), + (0x13FE, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F9, 'X'), + (0x1700, 'V'), + (0x1716, 'X'), + (0x171F, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x180F, 'I'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1879, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191F, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1ACF, 'X'), + (0x1B00, 'V'), + (0x1B4D, 'X'), + (0x1B50, 'V'), + (0x1B7F, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'M', 'в'), + ] + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1C81, 'M', 'д'), + (0x1C82, 'M', 'о'), + (0x1C83, 'M', 'с'), + (0x1C84, 'M', 'т'), + (0x1C86, 'M', 'ъ'), + (0x1C87, 'M', 'ѣ'), + (0x1C88, 'M', 'ꙋ'), + (0x1C89, 'X'), + (0x1C90, 'M', 'ა'), + (0x1C91, 'M', 'ბ'), + (0x1C92, 'M', 'გ'), + (0x1C93, 'M', 'დ'), + (0x1C94, 'M', 'ე'), + (0x1C95, 'M', 'ვ'), + (0x1C96, 'M', 'ზ'), + (0x1C97, 'M', 'თ'), + (0x1C98, 'M', 'ი'), + (0x1C99, 'M', 'კ'), + (0x1C9A, 'M', 'ლ'), + (0x1C9B, 'M', 'მ'), + (0x1C9C, 'M', 'ნ'), + (0x1C9D, 'M', 'ო'), + (0x1C9E, 'M', 'პ'), + (0x1C9F, 'M', 'ჟ'), + (0x1CA0, 'M', 'რ'), + (0x1CA1, 'M', 'ს'), + (0x1CA2, 'M', 'ტ'), + (0x1CA3, 'M', 'უ'), + (0x1CA4, 'M', 'ფ'), + (0x1CA5, 'M', 'ქ'), + (0x1CA6, 'M', 'ღ'), + (0x1CA7, 'M', 'ყ'), + (0x1CA8, 'M', 'შ'), + (0x1CA9, 'M', 'ჩ'), + (0x1CAA, 'M', 'ც'), + (0x1CAB, 'M', 'ძ'), + (0x1CAC, 'M', 'წ'), + (0x1CAD, 'M', 'ჭ'), + (0x1CAE, 'M', 'ხ'), + (0x1CAF, 'M', 'ჯ'), + (0x1CB0, 'M', 'ჰ'), + (0x1CB1, 'M', 'ჱ'), + (0x1CB2, 'M', 'ჲ'), + (0x1CB3, 'M', 'ჳ'), + (0x1CB4, 'M', 'ჴ'), + (0x1CB5, 'M', 'ჵ'), + (0x1CB6, 'M', 'ჶ'), + (0x1CB7, 'M', 'ჷ'), + (0x1CB8, 'M', 'ჸ'), + (0x1CB9, 'M', 'ჹ'), + (0x1CBA, 'M', 'ჺ'), + (0x1CBB, 'X'), + 
(0x1CBD, 'M', 'ჽ'), + (0x1CBE, 'M', 'ჾ'), + (0x1CBF, 'M', 'ჿ'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CFB, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', 'a'), + (0x1D2D, 'M', 'æ'), + (0x1D2E, 'M', 'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', 'd'), + (0x1D31, 'M', 'e'), + (0x1D32, 'M', 'ǝ'), + (0x1D33, 'M', 'g'), + (0x1D34, 'M', 'h'), + (0x1D35, 'M', 'i'), + (0x1D36, 'M', 'j'), + (0x1D37, 'M', 'k'), + (0x1D38, 'M', 'l'), + (0x1D39, 'M', 'm'), + (0x1D3A, 'M', 'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', 'o'), + (0x1D3D, 'M', 'ȣ'), + (0x1D3E, 'M', 'p'), + (0x1D3F, 'M', 'r'), + (0x1D40, 'M', 't'), + (0x1D41, 'M', 'u'), + (0x1D42, 'M', 'w'), + (0x1D43, 'M', 'a'), + (0x1D44, 'M', 'ɐ'), + (0x1D45, 'M', 'ɑ'), + (0x1D46, 'M', 'ᴂ'), + (0x1D47, 'M', 'b'), + (0x1D48, 'M', 'd'), + (0x1D49, 'M', 'e'), + (0x1D4A, 'M', 'ə'), + (0x1D4B, 'M', 'ɛ'), + (0x1D4C, 'M', 'ɜ'), + (0x1D4D, 'M', 'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', 'k'), + (0x1D50, 'M', 'm'), + (0x1D51, 'M', 'ŋ'), + (0x1D52, 'M', 'o'), + (0x1D53, 'M', 'ɔ'), + ] + +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D54, 'M', 'ᴖ'), + (0x1D55, 'M', 'ᴗ'), + (0x1D56, 'M', 'p'), + (0x1D57, 'M', 't'), + (0x1D58, 'M', 'u'), + (0x1D59, 'M', 'ᴝ'), + (0x1D5A, 'M', 'ɯ'), + (0x1D5B, 'M', 'v'), + (0x1D5C, 'M', 'ᴥ'), + (0x1D5D, 'M', 'β'), + (0x1D5E, 'M', 'γ'), + (0x1D5F, 'M', 'δ'), + (0x1D60, 'M', 'φ'), + (0x1D61, 'M', 'χ'), + (0x1D62, 'M', 'i'), + (0x1D63, 'M', 'r'), + (0x1D64, 'M', 'u'), + (0x1D65, 'M', 'v'), + (0x1D66, 'M', 'β'), + (0x1D67, 'M', 'γ'), + (0x1D68, 'M', 'ρ'), + (0x1D69, 'M', 'φ'), + (0x1D6A, 'M', 'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', 'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', 'ɒ'), + (0x1D9C, 'M', 'c'), + (0x1D9D, 'M', 'ɕ'), + (0x1D9E, 'M', 'ð'), + (0x1D9F, 'M', 'ɜ'), + (0x1DA0, 'M', 'f'), + (0x1DA1, 'M', 'ɟ'), + (0x1DA2, 'M', 'ɡ'), + (0x1DA3, 'M', 'ɥ'), + (0x1DA4, 'M', 'ɨ'), + (0x1DA5, 'M', 'ɩ'), + (0x1DA6, 'M', 'ɪ'), + (0x1DA7, 'M', 'ᵻ'), + (0x1DA8, 'M', 'ʝ'), + (0x1DA9, 'M', 'ɭ'), + (0x1DAA, 'M', 'ᶅ'), + (0x1DAB, 'M', 'ʟ'), + (0x1DAC, 'M', 'ɱ'), + (0x1DAD, 'M', 'ɰ'), + (0x1DAE, 'M', 'ɲ'), + (0x1DAF, 'M', 'ɳ'), + (0x1DB0, 'M', 'ɴ'), + (0x1DB1, 'M', 'ɵ'), + (0x1DB2, 'M', 'ɸ'), + (0x1DB3, 'M', 'ʂ'), + (0x1DB4, 'M', 'ʃ'), + (0x1DB5, 'M', 'ƫ'), + (0x1DB6, 'M', 'ʉ'), + (0x1DB7, 'M', 'ʊ'), + (0x1DB8, 'M', 'ᴜ'), + (0x1DB9, 'M', 'ʋ'), + (0x1DBA, 'M', 'ʌ'), + (0x1DBB, 'M', 'z'), + (0x1DBC, 'M', 'ʐ'), + (0x1DBD, 'M', 'ʑ'), + (0x1DBE, 'M', 'ʒ'), + (0x1DBF, 'M', 'θ'), + (0x1DC0, 'V'), + (0x1E00, 'M', 'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', 'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', 'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', 'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', 'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', 'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', 'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', 'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', 'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', 'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', 'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', 'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', 'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', 'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', 'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', 'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', 'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', 'ḣ'), + (0x1E23, 'V'), + ] + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E24, 'M', 'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', 'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', 'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', 'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', 'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', 'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', 'ḱ'), + (0x1E31, 
'V'), + (0x1E32, 'M', 'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', 'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', 'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', 'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', 'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', 'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', 'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', 'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', 'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', 'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', 'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', 'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', 'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', 'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', 'ṏ'), + (0x1E4F, 'V'), + (0x1E50, 'M', 'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', 'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', 'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', 'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', 'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', 'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', 'ṝ'), + (0x1E5D, 'V'), + (0x1E5E, 'M', 'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', 'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', 'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', 'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', 'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', 'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', 'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', 'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', 'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', 'ṱ'), + (0x1E71, 'V'), + (0x1E72, 'M', 'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', 'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', 'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', 'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', 'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', 'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', 'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', 'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', 'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', 'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', 'ẇ'), + (0x1E87, 'V'), + ] + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E88, 'M', 'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', 'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', 'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', 'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', 'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', 'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', 'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', 'aʾ'), + (0x1E9B, 'M', 'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', 'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', 'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', 'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', 'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', 'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', 'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', 'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', 'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', 'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', 'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', 'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', 'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', 'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', 'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', 'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', 'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', 'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', 'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', 'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', 'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', 'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', 'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', 'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', 'ọ'), + (0x1ECD, 'V'), + (0x1ECE, 'M', 'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', 'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', 'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', 'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', 'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', 'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', 'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', 'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', 'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', 'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', 'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', 'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', 'ủ'), + (0x1EE7, 'V'), + 
(0x1EE8, 'M', 'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', 'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', 'ử'), + (0x1EED, 'V'), + (0x1EEE, 'M', 'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', 'ự'), + ] + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EF1, 'V'), + (0x1EF2, 'M', 'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', 'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', 'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', 'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', 'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', 'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', 'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', 'ἀ'), + (0x1F09, 'M', 'ἁ'), + (0x1F0A, 'M', 'ἂ'), + (0x1F0B, 'M', 'ἃ'), + (0x1F0C, 'M', 'ἄ'), + (0x1F0D, 'M', 'ἅ'), + (0x1F0E, 'M', 'ἆ'), + (0x1F0F, 'M', 'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', 'ἐ'), + (0x1F19, 'M', 'ἑ'), + (0x1F1A, 'M', 'ἒ'), + (0x1F1B, 'M', 'ἓ'), + (0x1F1C, 'M', 'ἔ'), + (0x1F1D, 'M', 'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', 'ἠ'), + (0x1F29, 'M', 'ἡ'), + (0x1F2A, 'M', 'ἢ'), + (0x1F2B, 'M', 'ἣ'), + (0x1F2C, 'M', 'ἤ'), + (0x1F2D, 'M', 'ἥ'), + (0x1F2E, 'M', 'ἦ'), + (0x1F2F, 'M', 'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', 'ἰ'), + (0x1F39, 'M', 'ἱ'), + (0x1F3A, 'M', 'ἲ'), + (0x1F3B, 'M', 'ἳ'), + (0x1F3C, 'M', 'ἴ'), + (0x1F3D, 'M', 'ἵ'), + (0x1F3E, 'M', 'ἶ'), + (0x1F3F, 'M', 'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', 'ὀ'), + (0x1F49, 'M', 'ὁ'), + (0x1F4A, 'M', 'ὂ'), + (0x1F4B, 'M', 'ὃ'), + (0x1F4C, 'M', 'ὄ'), + (0x1F4D, 'M', 'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', 'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', 'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', 'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', 'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', 'ὠ'), + (0x1F69, 'M', 'ὡ'), + (0x1F6A, 'M', 'ὢ'), + (0x1F6B, 'M', 'ὣ'), + (0x1F6C, 'M', 'ὤ'), + (0x1F6D, 'M', 'ὥ'), + (0x1F6E, 'M', 'ὦ'), + (0x1F6F, 'M', 'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', 'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', 'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', 'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', 'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', 'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', 'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', 'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', 'ἀι'), + (0x1F81, 'M', 'ἁι'), + (0x1F82, 'M', 'ἂι'), + (0x1F83, 'M', 'ἃι'), + (0x1F84, 'M', 'ἄι'), + (0x1F85, 'M', 'ἅι'), + (0x1F86, 'M', 'ἆι'), + (0x1F87, 'M', 'ἇι'), + ] + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F88, 'M', 'ἀι'), + (0x1F89, 'M', 'ἁι'), + (0x1F8A, 'M', 'ἂι'), + (0x1F8B, 'M', 'ἃι'), + (0x1F8C, 'M', 'ἄι'), + (0x1F8D, 'M', 'ἅι'), + (0x1F8E, 'M', 'ἆι'), + (0x1F8F, 'M', 'ἇι'), + (0x1F90, 'M', 'ἠι'), + (0x1F91, 'M', 'ἡι'), + (0x1F92, 'M', 'ἢι'), + (0x1F93, 'M', 'ἣι'), + (0x1F94, 'M', 'ἤι'), + (0x1F95, 'M', 'ἥι'), + (0x1F96, 'M', 'ἦι'), + (0x1F97, 'M', 'ἧι'), + (0x1F98, 'M', 'ἠι'), + (0x1F99, 'M', 'ἡι'), + (0x1F9A, 'M', 'ἢι'), + (0x1F9B, 'M', 'ἣι'), + (0x1F9C, 'M', 'ἤι'), + (0x1F9D, 'M', 'ἥι'), + (0x1F9E, 'M', 'ἦι'), + (0x1F9F, 'M', 'ἧι'), + (0x1FA0, 'M', 'ὠι'), + (0x1FA1, 'M', 'ὡι'), + (0x1FA2, 'M', 'ὢι'), + (0x1FA3, 'M', 'ὣι'), + (0x1FA4, 'M', 'ὤι'), + (0x1FA5, 'M', 'ὥι'), + (0x1FA6, 'M', 'ὦι'), + (0x1FA7, 'M', 'ὧι'), + (0x1FA8, 'M', 'ὠι'), + (0x1FA9, 'M', 'ὡι'), + (0x1FAA, 'M', 'ὢι'), + (0x1FAB, 'M', 'ὣι'), + (0x1FAC, 'M', 'ὤι'), + (0x1FAD, 'M', 'ὥι'), + (0x1FAE, 'M', 'ὦι'), + (0x1FAF, 'M', 'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', 'ὰι'), + (0x1FB3, 'M', 'αι'), + (0x1FB4, 'M', 'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', 'ᾶι'), + (0x1FB8, 'M', 'ᾰ'), + (0x1FB9, 'M', 'ᾱ'), + (0x1FBA, 'M', 'ὰ'), + (0x1FBB, 'M', 
'ά'), + (0x1FBC, 'M', 'αι'), + (0x1FBD, '3', ' ̓'), + (0x1FBE, 'M', 'ι'), + (0x1FBF, '3', ' ̓'), + (0x1FC0, '3', ' ͂'), + (0x1FC1, '3', ' ̈͂'), + (0x1FC2, 'M', 'ὴι'), + (0x1FC3, 'M', 'ηι'), + (0x1FC4, 'M', 'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', 'ῆι'), + (0x1FC8, 'M', 'ὲ'), + (0x1FC9, 'M', 'έ'), + (0x1FCA, 'M', 'ὴ'), + (0x1FCB, 'M', 'ή'), + (0x1FCC, 'M', 'ηι'), + (0x1FCD, '3', ' ̓̀'), + (0x1FCE, '3', ' ̓́'), + (0x1FCF, '3', ' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', 'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', 'ῐ'), + (0x1FD9, 'M', 'ῑ'), + (0x1FDA, 'M', 'ὶ'), + (0x1FDB, 'M', 'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', ' ̔̀'), + (0x1FDE, '3', ' ̔́'), + (0x1FDF, '3', ' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', 'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', 'ῠ'), + (0x1FE9, 'M', 'ῡ'), + (0x1FEA, 'M', 'ὺ'), + (0x1FEB, 'M', 'ύ'), + (0x1FEC, 'M', 'ῥ'), + (0x1FED, '3', ' ̈̀'), + (0x1FEE, '3', ' ̈́'), + (0x1FEF, '3', '`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', 'ὼι'), + (0x1FF3, 'M', 'ωι'), + (0x1FF4, 'M', 'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + ] + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FF7, 'M', 'ῶι'), + (0x1FF8, 'M', 'ὸ'), + (0x1FF9, 'M', 'ό'), + (0x1FFA, 'M', 'ὼ'), + (0x1FFB, 'M', 'ώ'), + (0x1FFC, 'M', 'ωι'), + (0x1FFD, '3', ' ́'), + (0x1FFE, '3', ' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', ' '), + (0x200B, 'I'), + (0x200C, 'D', ''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', '‐'), + (0x2012, 'V'), + (0x2017, '3', ' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', ' '), + (0x2030, 'V'), + (0x2033, 'M', '′′'), + (0x2034, 'M', '′′′'), + (0x2035, 'V'), + (0x2036, 'M', '‵‵'), + (0x2037, 'M', '‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', '!!'), + (0x203D, 'V'), + (0x203E, '3', ' ̅'), + (0x203F, 'V'), + (0x2047, '3', '??'), + (0x2048, '3', '?!'), + (0x2049, '3', '!?'), + (0x204A, 'V'), + (0x2057, 'M', '′′′′'), + (0x2058, 'V'), + (0x205F, '3', ' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', '0'), + (0x2071, 'M', 'i'), + (0x2072, 'X'), + (0x2074, 'M', '4'), + (0x2075, 'M', '5'), + (0x2076, 'M', '6'), + (0x2077, 'M', '7'), + (0x2078, 'M', '8'), + (0x2079, 'M', '9'), + (0x207A, '3', '+'), + (0x207B, 'M', '−'), + (0x207C, '3', '='), + (0x207D, '3', '('), + (0x207E, '3', ')'), + (0x207F, 'M', 'n'), + (0x2080, 'M', '0'), + (0x2081, 'M', '1'), + (0x2082, 'M', '2'), + (0x2083, 'M', '3'), + (0x2084, 'M', '4'), + (0x2085, 'M', '5'), + (0x2086, 'M', '6'), + (0x2087, 'M', '7'), + (0x2088, 'M', '8'), + (0x2089, 'M', '9'), + (0x208A, '3', '+'), + (0x208B, 'M', '−'), + (0x208C, '3', '='), + (0x208D, '3', '('), + (0x208E, '3', ')'), + (0x208F, 'X'), + (0x2090, 'M', 'a'), + (0x2091, 'M', 'e'), + (0x2092, 'M', 'o'), + (0x2093, 'M', 'x'), + (0x2094, 'M', 'ə'), + (0x2095, 'M', 'h'), + (0x2096, 'M', 'k'), + (0x2097, 'M', 'l'), + (0x2098, 'M', 'm'), + (0x2099, 'M', 'n'), + (0x209A, 'M', 'p'), + (0x209B, 'M', 's'), + (0x209C, 'M', 't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', 'rs'), + (0x20A9, 'V'), + (0x20C1, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', 'a/c'), + (0x2101, '3', 'a/s'), + (0x2102, 'M', 'c'), + (0x2103, 'M', '°c'), + (0x2104, 'V'), + ] + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2105, '3', 'c/o'), + (0x2106, '3', 'c/u'), + (0x2107, 'M', 'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', '°f'), + (0x210A, 'M', 'g'), + (0x210B, 'M', 'h'), + (0x210F, 'M', 'ħ'), + (0x2110, 'M', 'i'), + (0x2112, 'M', 'l'), + (0x2114, 'V'), + 
(0x2115, 'M', 'n'), + (0x2116, 'M', 'no'), + (0x2117, 'V'), + (0x2119, 'M', 'p'), + (0x211A, 'M', 'q'), + (0x211B, 'M', 'r'), + (0x211E, 'V'), + (0x2120, 'M', 'sm'), + (0x2121, 'M', 'tel'), + (0x2122, 'M', 'tm'), + (0x2123, 'V'), + (0x2124, 'M', 'z'), + (0x2125, 'V'), + (0x2126, 'M', 'ω'), + (0x2127, 'V'), + (0x2128, 'M', 'z'), + (0x2129, 'V'), + (0x212A, 'M', 'k'), + (0x212B, 'M', 'å'), + (0x212C, 'M', 'b'), + (0x212D, 'M', 'c'), + (0x212E, 'V'), + (0x212F, 'M', 'e'), + (0x2131, 'M', 'f'), + (0x2132, 'X'), + (0x2133, 'M', 'm'), + (0x2134, 'M', 'o'), + (0x2135, 'M', 'א'), + (0x2136, 'M', 'ב'), + (0x2137, 'M', 'ג'), + (0x2138, 'M', 'ד'), + (0x2139, 'M', 'i'), + (0x213A, 'V'), + (0x213B, 'M', 'fax'), + (0x213C, 'M', 'π'), + (0x213D, 'M', 'γ'), + (0x213F, 'M', 'π'), + (0x2140, 'M', '∑'), + (0x2141, 'V'), + (0x2145, 'M', 'd'), + (0x2147, 'M', 'e'), + (0x2148, 'M', 'i'), + (0x2149, 'M', 'j'), + (0x214A, 'V'), + (0x2150, 'M', '1⁄7'), + (0x2151, 'M', '1⁄9'), + (0x2152, 'M', '1⁄10'), + (0x2153, 'M', '1⁄3'), + (0x2154, 'M', '2⁄3'), + (0x2155, 'M', '1⁄5'), + (0x2156, 'M', '2⁄5'), + (0x2157, 'M', '3⁄5'), + (0x2158, 'M', '4⁄5'), + (0x2159, 'M', '1⁄6'), + (0x215A, 'M', '5⁄6'), + (0x215B, 'M', '1⁄8'), + (0x215C, 'M', '3⁄8'), + (0x215D, 'M', '5⁄8'), + (0x215E, 'M', '7⁄8'), + (0x215F, 'M', '1⁄'), + (0x2160, 'M', 'i'), + (0x2161, 'M', 'ii'), + (0x2162, 'M', 'iii'), + (0x2163, 'M', 'iv'), + (0x2164, 'M', 'v'), + (0x2165, 'M', 'vi'), + (0x2166, 'M', 'vii'), + (0x2167, 'M', 'viii'), + (0x2168, 'M', 'ix'), + (0x2169, 'M', 'x'), + (0x216A, 'M', 'xi'), + (0x216B, 'M', 'xii'), + (0x216C, 'M', 'l'), + (0x216D, 'M', 'c'), + (0x216E, 'M', 'd'), + (0x216F, 'M', 'm'), + (0x2170, 'M', 'i'), + (0x2171, 'M', 'ii'), + (0x2172, 'M', 'iii'), + (0x2173, 'M', 'iv'), + (0x2174, 'M', 'v'), + (0x2175, 'M', 'vi'), + (0x2176, 'M', 'vii'), + (0x2177, 'M', 'viii'), + (0x2178, 'M', 'ix'), + (0x2179, 'M', 'x'), + (0x217A, 'M', 'xi'), + (0x217B, 'M', 'xii'), + (0x217C, 'M', 'l'), + ] + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x217D, 'M', 'c'), + (0x217E, 'M', 'd'), + (0x217F, 'M', 'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', '0⁄3'), + (0x218A, 'V'), + (0x218C, 'X'), + (0x2190, 'V'), + (0x222C, 'M', '∫∫'), + (0x222D, 'M', '∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', '∮∮'), + (0x2230, 'M', '∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', '〈'), + (0x232A, 'M', '〉'), + (0x232B, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', '1'), + (0x2461, 'M', '2'), + (0x2462, 'M', '3'), + (0x2463, 'M', '4'), + (0x2464, 'M', '5'), + (0x2465, 'M', '6'), + (0x2466, 'M', '7'), + (0x2467, 'M', '8'), + (0x2468, 'M', '9'), + (0x2469, 'M', '10'), + (0x246A, 'M', '11'), + (0x246B, 'M', '12'), + (0x246C, 'M', '13'), + (0x246D, 'M', '14'), + (0x246E, 'M', '15'), + (0x246F, 'M', '16'), + (0x2470, 'M', '17'), + (0x2471, 'M', '18'), + (0x2472, 'M', '19'), + (0x2473, 'M', '20'), + (0x2474, '3', '(1)'), + (0x2475, '3', '(2)'), + (0x2476, '3', '(3)'), + (0x2477, '3', '(4)'), + (0x2478, '3', '(5)'), + (0x2479, '3', '(6)'), + (0x247A, '3', '(7)'), + (0x247B, '3', '(8)'), + (0x247C, '3', '(9)'), + (0x247D, '3', '(10)'), + (0x247E, '3', '(11)'), + (0x247F, '3', '(12)'), + (0x2480, '3', '(13)'), + (0x2481, '3', '(14)'), + (0x2482, '3', '(15)'), + (0x2483, '3', '(16)'), + (0x2484, '3', '(17)'), + (0x2485, '3', '(18)'), + (0x2486, '3', '(19)'), + (0x2487, '3', '(20)'), + (0x2488, 'X'), + (0x249C, '3', '(a)'), + (0x249D, '3', 
'(b)'), + (0x249E, '3', '(c)'), + (0x249F, '3', '(d)'), + (0x24A0, '3', '(e)'), + (0x24A1, '3', '(f)'), + (0x24A2, '3', '(g)'), + (0x24A3, '3', '(h)'), + (0x24A4, '3', '(i)'), + (0x24A5, '3', '(j)'), + (0x24A6, '3', '(k)'), + (0x24A7, '3', '(l)'), + (0x24A8, '3', '(m)'), + (0x24A9, '3', '(n)'), + (0x24AA, '3', '(o)'), + (0x24AB, '3', '(p)'), + (0x24AC, '3', '(q)'), + (0x24AD, '3', '(r)'), + (0x24AE, '3', '(s)'), + (0x24AF, '3', '(t)'), + (0x24B0, '3', '(u)'), + (0x24B1, '3', '(v)'), + (0x24B2, '3', '(w)'), + (0x24B3, '3', '(x)'), + (0x24B4, '3', '(y)'), + (0x24B5, '3', '(z)'), + (0x24B6, 'M', 'a'), + (0x24B7, 'M', 'b'), + (0x24B8, 'M', 'c'), + (0x24B9, 'M', 'd'), + (0x24BA, 'M', 'e'), + (0x24BB, 'M', 'f'), + (0x24BC, 'M', 'g'), + ] + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x24BD, 'M', 'h'), + (0x24BE, 'M', 'i'), + (0x24BF, 'M', 'j'), + (0x24C0, 'M', 'k'), + (0x24C1, 'M', 'l'), + (0x24C2, 'M', 'm'), + (0x24C3, 'M', 'n'), + (0x24C4, 'M', 'o'), + (0x24C5, 'M', 'p'), + (0x24C6, 'M', 'q'), + (0x24C7, 'M', 'r'), + (0x24C8, 'M', 's'), + (0x24C9, 'M', 't'), + (0x24CA, 'M', 'u'), + (0x24CB, 'M', 'v'), + (0x24CC, 'M', 'w'), + (0x24CD, 'M', 'x'), + (0x24CE, 'M', 'y'), + (0x24CF, 'M', 'z'), + (0x24D0, 'M', 'a'), + (0x24D1, 'M', 'b'), + (0x24D2, 'M', 'c'), + (0x24D3, 'M', 'd'), + (0x24D4, 'M', 'e'), + (0x24D5, 'M', 'f'), + (0x24D6, 'M', 'g'), + (0x24D7, 'M', 'h'), + (0x24D8, 'M', 'i'), + (0x24D9, 'M', 'j'), + (0x24DA, 'M', 'k'), + (0x24DB, 'M', 'l'), + (0x24DC, 'M', 'm'), + (0x24DD, 'M', 'n'), + (0x24DE, 'M', 'o'), + (0x24DF, 'M', 'p'), + (0x24E0, 'M', 'q'), + (0x24E1, 'M', 'r'), + (0x24E2, 'M', 's'), + (0x24E3, 'M', 't'), + (0x24E4, 'M', 'u'), + (0x24E5, 'M', 'v'), + (0x24E6, 'M', 'w'), + (0x24E7, 'M', 'x'), + (0x24E8, 'M', 'y'), + (0x24E9, 'M', 'z'), + (0x24EA, 'M', '0'), + (0x24EB, 'V'), + (0x2A0C, 'M', '∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', '::='), + (0x2A75, '3', '=='), + (0x2A76, '3', '==='), + (0x2A77, 'V'), + (0x2ADC, 'M', '⫝̸'), + (0x2ADD, 'V'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B97, 'V'), + (0x2C00, 'M', 'ⰰ'), + (0x2C01, 'M', 'ⰱ'), + (0x2C02, 'M', 'ⰲ'), + (0x2C03, 'M', 'ⰳ'), + (0x2C04, 'M', 'ⰴ'), + (0x2C05, 'M', 'ⰵ'), + (0x2C06, 'M', 'ⰶ'), + (0x2C07, 'M', 'ⰷ'), + (0x2C08, 'M', 'ⰸ'), + (0x2C09, 'M', 'ⰹ'), + (0x2C0A, 'M', 'ⰺ'), + (0x2C0B, 'M', 'ⰻ'), + (0x2C0C, 'M', 'ⰼ'), + (0x2C0D, 'M', 'ⰽ'), + (0x2C0E, 'M', 'ⰾ'), + (0x2C0F, 'M', 'ⰿ'), + (0x2C10, 'M', 'ⱀ'), + (0x2C11, 'M', 'ⱁ'), + (0x2C12, 'M', 'ⱂ'), + (0x2C13, 'M', 'ⱃ'), + (0x2C14, 'M', 'ⱄ'), + (0x2C15, 'M', 'ⱅ'), + (0x2C16, 'M', 'ⱆ'), + (0x2C17, 'M', 'ⱇ'), + (0x2C18, 'M', 'ⱈ'), + (0x2C19, 'M', 'ⱉ'), + (0x2C1A, 'M', 'ⱊ'), + (0x2C1B, 'M', 'ⱋ'), + (0x2C1C, 'M', 'ⱌ'), + (0x2C1D, 'M', 'ⱍ'), + (0x2C1E, 'M', 'ⱎ'), + (0x2C1F, 'M', 'ⱏ'), + (0x2C20, 'M', 'ⱐ'), + (0x2C21, 'M', 'ⱑ'), + (0x2C22, 'M', 'ⱒ'), + (0x2C23, 'M', 'ⱓ'), + (0x2C24, 'M', 'ⱔ'), + (0x2C25, 'M', 'ⱕ'), + (0x2C26, 'M', 'ⱖ'), + (0x2C27, 'M', 'ⱗ'), + (0x2C28, 'M', 'ⱘ'), + ] + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C29, 'M', 'ⱙ'), + (0x2C2A, 'M', 'ⱚ'), + (0x2C2B, 'M', 'ⱛ'), + (0x2C2C, 'M', 'ⱜ'), + (0x2C2D, 'M', 'ⱝ'), + (0x2C2E, 'M', 'ⱞ'), + (0x2C2F, 'M', 'ⱟ'), + (0x2C30, 'V'), + (0x2C60, 'M', 'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', 'ɫ'), + (0x2C63, 'M', 'ᵽ'), + (0x2C64, 'M', 'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', 'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', 'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', 'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', 'ɑ'), + (0x2C6E, 'M', 'ɱ'), + (0x2C6F, 'M', 
'ɐ'), + (0x2C70, 'M', 'ɒ'), + (0x2C71, 'V'), + (0x2C72, 'M', 'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', 'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', 'j'), + (0x2C7D, 'M', 'v'), + (0x2C7E, 'M', 'ȿ'), + (0x2C7F, 'M', 'ɀ'), + (0x2C80, 'M', 'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', 'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', 'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', 'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', 'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', 'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', 'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', 'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', 'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', 'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', 'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', 'ⲗ'), + (0x2C97, 'V'), + (0x2C98, 'M', 'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', 'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', 'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', 'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', 'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', 'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', 'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', 'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', 'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', 'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', 'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', 'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', 'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', 'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', 'ⲵ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', 'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', 'ⲹ'), + (0x2CB9, 'V'), + (0x2CBA, 'M', 'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', 'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', 'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', 'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', 'ⳃ'), + ] + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2CC3, 'V'), + (0x2CC4, 'M', 'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', 'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', 'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', 'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', 'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', 'ⳏ'), + (0x2CCF, 'V'), + (0x2CD0, 'M', 'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', 'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', 'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', 'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', 'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', 'ⳛ'), + (0x2CDB, 'V'), + (0x2CDC, 'M', 'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', 'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', 'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', 'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', 'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', 'ⳮ'), + (0x2CEE, 'V'), + (0x2CF2, 'M', 'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', 'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E5E, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', '母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', '龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', '一'), + (0x2F01, 'M', '丨'), + (0x2F02, 'M', '丶'), + (0x2F03, 'M', '丿'), + (0x2F04, 'M', '乙'), + (0x2F05, 'M', '亅'), + (0x2F06, 'M', '二'), + (0x2F07, 'M', '亠'), + (0x2F08, 'M', '人'), + (0x2F09, 'M', '儿'), + (0x2F0A, 'M', '入'), + (0x2F0B, 'M', '八'), + (0x2F0C, 'M', '冂'), + (0x2F0D, 'M', '冖'), + (0x2F0E, 'M', '冫'), + (0x2F0F, 'M', '几'), + (0x2F10, 'M', '凵'), + (0x2F11, 'M', '刀'), + (0x2F12, 'M', '力'), + (0x2F13, 'M', '勹'), + (0x2F14, 'M', '匕'), + (0x2F15, 'M', '匚'), + ] + +def _seg_27() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F16, 'M', '匸'), + (0x2F17, 'M', '十'), + (0x2F18, 'M', '卜'), + (0x2F19, 'M', '卩'), + (0x2F1A, 'M', '厂'), + (0x2F1B, 'M', '厶'), + (0x2F1C, 'M', '又'), + (0x2F1D, 'M', '口'), + (0x2F1E, 'M', '囗'), + (0x2F1F, 'M', '土'), + (0x2F20, 'M', '士'), + (0x2F21, 'M', '夂'), + (0x2F22, 'M', '夊'), + (0x2F23, 'M', '夕'), + (0x2F24, 'M', '大'), + (0x2F25, 'M', '女'), + (0x2F26, 'M', '子'), + (0x2F27, 'M', '宀'), + (0x2F28, 'M', '寸'), + (0x2F29, 'M', '小'), + (0x2F2A, 'M', '尢'), + (0x2F2B, 'M', '尸'), + (0x2F2C, 'M', '屮'), + (0x2F2D, 'M', '山'), + (0x2F2E, 'M', '巛'), + (0x2F2F, 'M', '工'), + (0x2F30, 'M', '己'), + (0x2F31, 'M', '巾'), + (0x2F32, 'M', '干'), + (0x2F33, 'M', '幺'), + (0x2F34, 'M', '广'), + (0x2F35, 'M', '廴'), + (0x2F36, 'M', '廾'), + (0x2F37, 'M', '弋'), + (0x2F38, 'M', '弓'), + (0x2F39, 'M', '彐'), + (0x2F3A, 'M', '彡'), + (0x2F3B, 'M', '彳'), + (0x2F3C, 'M', '心'), + (0x2F3D, 'M', '戈'), + (0x2F3E, 'M', '戶'), + (0x2F3F, 'M', '手'), + (0x2F40, 'M', '支'), + (0x2F41, 'M', '攴'), + (0x2F42, 'M', '文'), + (0x2F43, 'M', '斗'), + (0x2F44, 'M', '斤'), + (0x2F45, 'M', '方'), + (0x2F46, 'M', '无'), + (0x2F47, 'M', '日'), + (0x2F48, 'M', '曰'), + (0x2F49, 'M', '月'), + (0x2F4A, 'M', '木'), + (0x2F4B, 'M', '欠'), + (0x2F4C, 'M', '止'), + (0x2F4D, 'M', '歹'), + (0x2F4E, 'M', '殳'), + (0x2F4F, 'M', '毋'), + (0x2F50, 'M', '比'), + (0x2F51, 'M', '毛'), + (0x2F52, 'M', '氏'), + (0x2F53, 'M', '气'), + (0x2F54, 'M', '水'), + (0x2F55, 'M', '火'), + (0x2F56, 'M', '爪'), + (0x2F57, 'M', '父'), + (0x2F58, 'M', '爻'), + (0x2F59, 'M', '爿'), + (0x2F5A, 'M', '片'), + (0x2F5B, 'M', '牙'), + (0x2F5C, 'M', '牛'), + (0x2F5D, 'M', '犬'), + (0x2F5E, 'M', '玄'), + (0x2F5F, 'M', '玉'), + (0x2F60, 'M', '瓜'), + (0x2F61, 'M', '瓦'), + (0x2F62, 'M', '甘'), + (0x2F63, 'M', '生'), + (0x2F64, 'M', '用'), + (0x2F65, 'M', '田'), + (0x2F66, 'M', '疋'), + (0x2F67, 'M', '疒'), + (0x2F68, 'M', '癶'), + (0x2F69, 'M', '白'), + (0x2F6A, 'M', '皮'), + (0x2F6B, 'M', '皿'), + (0x2F6C, 'M', '目'), + (0x2F6D, 'M', '矛'), + (0x2F6E, 'M', '矢'), + (0x2F6F, 'M', '石'), + (0x2F70, 'M', '示'), + (0x2F71, 'M', '禸'), + (0x2F72, 'M', '禾'), + (0x2F73, 'M', '穴'), + (0x2F74, 'M', '立'), + (0x2F75, 'M', '竹'), + (0x2F76, 'M', '米'), + (0x2F77, 'M', '糸'), + (0x2F78, 'M', '缶'), + (0x2F79, 'M', '网'), + ] + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F7A, 'M', '羊'), + (0x2F7B, 'M', '羽'), + (0x2F7C, 'M', '老'), + (0x2F7D, 'M', '而'), + (0x2F7E, 'M', '耒'), + (0x2F7F, 'M', '耳'), + (0x2F80, 'M', '聿'), + (0x2F81, 'M', '肉'), + (0x2F82, 'M', '臣'), + (0x2F83, 'M', '自'), + (0x2F84, 'M', '至'), + (0x2F85, 'M', '臼'), + (0x2F86, 'M', '舌'), + (0x2F87, 'M', '舛'), + (0x2F88, 'M', '舟'), + (0x2F89, 'M', '艮'), + (0x2F8A, 'M', '色'), + (0x2F8B, 'M', '艸'), + (0x2F8C, 'M', '虍'), + (0x2F8D, 'M', '虫'), + (0x2F8E, 'M', '血'), + (0x2F8F, 'M', '行'), + (0x2F90, 'M', '衣'), + (0x2F91, 'M', '襾'), + (0x2F92, 'M', '見'), + (0x2F93, 'M', '角'), + (0x2F94, 'M', '言'), + (0x2F95, 'M', '谷'), + (0x2F96, 'M', '豆'), + (0x2F97, 'M', '豕'), + (0x2F98, 'M', '豸'), + (0x2F99, 'M', '貝'), + (0x2F9A, 'M', '赤'), + (0x2F9B, 'M', '走'), + (0x2F9C, 'M', '足'), + (0x2F9D, 'M', '身'), + (0x2F9E, 'M', '車'), + (0x2F9F, 'M', '辛'), + (0x2FA0, 'M', '辰'), + (0x2FA1, 'M', '辵'), + (0x2FA2, 'M', '邑'), + (0x2FA3, 'M', '酉'), + (0x2FA4, 'M', '釆'), + (0x2FA5, 'M', '里'), + (0x2FA6, 'M', '金'), + (0x2FA7, 'M', '長'), + (0x2FA8, 'M', '門'), + (0x2FA9, 'M', '阜'), + (0x2FAA, 'M', '隶'), + (0x2FAB, 'M', '隹'), + (0x2FAC, 'M', '雨'), + (0x2FAD, 'M', '靑'), + (0x2FAE, 'M', '非'), + (0x2FAF, 'M', '面'), + (0x2FB0, 'M', 
'革'), + (0x2FB1, 'M', '韋'), + (0x2FB2, 'M', '韭'), + (0x2FB3, 'M', '音'), + (0x2FB4, 'M', '頁'), + (0x2FB5, 'M', '風'), + (0x2FB6, 'M', '飛'), + (0x2FB7, 'M', '食'), + (0x2FB8, 'M', '首'), + (0x2FB9, 'M', '香'), + (0x2FBA, 'M', '馬'), + (0x2FBB, 'M', '骨'), + (0x2FBC, 'M', '高'), + (0x2FBD, 'M', '髟'), + (0x2FBE, 'M', '鬥'), + (0x2FBF, 'M', '鬯'), + (0x2FC0, 'M', '鬲'), + (0x2FC1, 'M', '鬼'), + (0x2FC2, 'M', '魚'), + (0x2FC3, 'M', '鳥'), + (0x2FC4, 'M', '鹵'), + (0x2FC5, 'M', '鹿'), + (0x2FC6, 'M', '麥'), + (0x2FC7, 'M', '麻'), + (0x2FC8, 'M', '黃'), + (0x2FC9, 'M', '黍'), + (0x2FCA, 'M', '黑'), + (0x2FCB, 'M', '黹'), + (0x2FCC, 'M', '黽'), + (0x2FCD, 'M', '鼎'), + (0x2FCE, 'M', '鼓'), + (0x2FCF, 'M', '鼠'), + (0x2FD0, 'M', '鼻'), + (0x2FD1, 'M', '齊'), + (0x2FD2, 'M', '齒'), + (0x2FD3, 'M', '龍'), + (0x2FD4, 'M', '龜'), + (0x2FD5, 'M', '龠'), + (0x2FD6, 'X'), + (0x3000, '3', ' '), + (0x3001, 'V'), + (0x3002, 'M', '.'), + (0x3003, 'V'), + (0x3036, 'M', '〒'), + (0x3037, 'V'), + (0x3038, 'M', '十'), + ] + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3039, 'M', '卄'), + (0x303A, 'M', '卅'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', ' ゙'), + (0x309C, '3', ' ゚'), + (0x309D, 'V'), + (0x309F, 'M', 'より'), + (0x30A0, 'V'), + (0x30FF, 'M', 'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x3130, 'X'), + (0x3131, 'M', 'ᄀ'), + (0x3132, 'M', 'ᄁ'), + (0x3133, 'M', 'ᆪ'), + (0x3134, 'M', 'ᄂ'), + (0x3135, 'M', 'ᆬ'), + (0x3136, 'M', 'ᆭ'), + (0x3137, 'M', 'ᄃ'), + (0x3138, 'M', 'ᄄ'), + (0x3139, 'M', 'ᄅ'), + (0x313A, 'M', 'ᆰ'), + (0x313B, 'M', 'ᆱ'), + (0x313C, 'M', 'ᆲ'), + (0x313D, 'M', 'ᆳ'), + (0x313E, 'M', 'ᆴ'), + (0x313F, 'M', 'ᆵ'), + (0x3140, 'M', 'ᄚ'), + (0x3141, 'M', 'ᄆ'), + (0x3142, 'M', 'ᄇ'), + (0x3143, 'M', 'ᄈ'), + (0x3144, 'M', 'ᄡ'), + (0x3145, 'M', 'ᄉ'), + (0x3146, 'M', 'ᄊ'), + (0x3147, 'M', 'ᄋ'), + (0x3148, 'M', 'ᄌ'), + (0x3149, 'M', 'ᄍ'), + (0x314A, 'M', 'ᄎ'), + (0x314B, 'M', 'ᄏ'), + (0x314C, 'M', 'ᄐ'), + (0x314D, 'M', 'ᄑ'), + (0x314E, 'M', 'ᄒ'), + (0x314F, 'M', 'ᅡ'), + (0x3150, 'M', 'ᅢ'), + (0x3151, 'M', 'ᅣ'), + (0x3152, 'M', 'ᅤ'), + (0x3153, 'M', 'ᅥ'), + (0x3154, 'M', 'ᅦ'), + (0x3155, 'M', 'ᅧ'), + (0x3156, 'M', 'ᅨ'), + (0x3157, 'M', 'ᅩ'), + (0x3158, 'M', 'ᅪ'), + (0x3159, 'M', 'ᅫ'), + (0x315A, 'M', 'ᅬ'), + (0x315B, 'M', 'ᅭ'), + (0x315C, 'M', 'ᅮ'), + (0x315D, 'M', 'ᅯ'), + (0x315E, 'M', 'ᅰ'), + (0x315F, 'M', 'ᅱ'), + (0x3160, 'M', 'ᅲ'), + (0x3161, 'M', 'ᅳ'), + (0x3162, 'M', 'ᅴ'), + (0x3163, 'M', 'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', 'ᄔ'), + (0x3166, 'M', 'ᄕ'), + (0x3167, 'M', 'ᇇ'), + (0x3168, 'M', 'ᇈ'), + (0x3169, 'M', 'ᇌ'), + (0x316A, 'M', 'ᇎ'), + (0x316B, 'M', 'ᇓ'), + (0x316C, 'M', 'ᇗ'), + (0x316D, 'M', 'ᇙ'), + (0x316E, 'M', 'ᄜ'), + (0x316F, 'M', 'ᇝ'), + (0x3170, 'M', 'ᇟ'), + (0x3171, 'M', 'ᄝ'), + (0x3172, 'M', 'ᄞ'), + (0x3173, 'M', 'ᄠ'), + (0x3174, 'M', 'ᄢ'), + (0x3175, 'M', 'ᄣ'), + (0x3176, 'M', 'ᄧ'), + (0x3177, 'M', 'ᄩ'), + (0x3178, 'M', 'ᄫ'), + (0x3179, 'M', 'ᄬ'), + (0x317A, 'M', 'ᄭ'), + (0x317B, 'M', 'ᄮ'), + (0x317C, 'M', 'ᄯ'), + (0x317D, 'M', 'ᄲ'), + (0x317E, 'M', 'ᄶ'), + (0x317F, 'M', 'ᅀ'), + (0x3180, 'M', 'ᅇ'), + (0x3181, 'M', 'ᅌ'), + (0x3182, 'M', 'ᇱ'), + (0x3183, 'M', 'ᇲ'), + (0x3184, 'M', 'ᅗ'), + ] + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3185, 'M', 'ᅘ'), + (0x3186, 'M', 'ᅙ'), + (0x3187, 'M', 'ᆄ'), + (0x3188, 'M', 'ᆅ'), + (0x3189, 'M', 'ᆈ'), + (0x318A, 'M', 'ᆑ'), + (0x318B, 'M', 'ᆒ'), + (0x318C, 'M', 'ᆔ'), + (0x318D, 'M', 'ᆞ'), + (0x318E, 'M', 'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + 
(0x3192, 'M', '一'), + (0x3193, 'M', '二'), + (0x3194, 'M', '三'), + (0x3195, 'M', '四'), + (0x3196, 'M', '上'), + (0x3197, 'M', '中'), + (0x3198, 'M', '下'), + (0x3199, 'M', '甲'), + (0x319A, 'M', '乙'), + (0x319B, 'M', '丙'), + (0x319C, 'M', '丁'), + (0x319D, 'M', '天'), + (0x319E, 'M', '地'), + (0x319F, 'M', '人'), + (0x31A0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', '(ᄀ)'), + (0x3201, '3', '(ᄂ)'), + (0x3202, '3', '(ᄃ)'), + (0x3203, '3', '(ᄅ)'), + (0x3204, '3', '(ᄆ)'), + (0x3205, '3', '(ᄇ)'), + (0x3206, '3', '(ᄉ)'), + (0x3207, '3', '(ᄋ)'), + (0x3208, '3', '(ᄌ)'), + (0x3209, '3', '(ᄎ)'), + (0x320A, '3', '(ᄏ)'), + (0x320B, '3', '(ᄐ)'), + (0x320C, '3', '(ᄑ)'), + (0x320D, '3', '(ᄒ)'), + (0x320E, '3', '(가)'), + (0x320F, '3', '(나)'), + (0x3210, '3', '(다)'), + (0x3211, '3', '(라)'), + (0x3212, '3', '(마)'), + (0x3213, '3', '(바)'), + (0x3214, '3', '(사)'), + (0x3215, '3', '(아)'), + (0x3216, '3', '(자)'), + (0x3217, '3', '(차)'), + (0x3218, '3', '(카)'), + (0x3219, '3', '(타)'), + (0x321A, '3', '(파)'), + (0x321B, '3', '(하)'), + (0x321C, '3', '(주)'), + (0x321D, '3', '(오전)'), + (0x321E, '3', '(오후)'), + (0x321F, 'X'), + (0x3220, '3', '(一)'), + (0x3221, '3', '(二)'), + (0x3222, '3', '(三)'), + (0x3223, '3', '(四)'), + (0x3224, '3', '(五)'), + (0x3225, '3', '(六)'), + (0x3226, '3', '(七)'), + (0x3227, '3', '(八)'), + (0x3228, '3', '(九)'), + (0x3229, '3', '(十)'), + (0x322A, '3', '(月)'), + (0x322B, '3', '(火)'), + (0x322C, '3', '(水)'), + (0x322D, '3', '(木)'), + (0x322E, '3', '(金)'), + (0x322F, '3', '(土)'), + (0x3230, '3', '(日)'), + (0x3231, '3', '(株)'), + (0x3232, '3', '(有)'), + (0x3233, '3', '(社)'), + (0x3234, '3', '(名)'), + (0x3235, '3', '(特)'), + (0x3236, '3', '(財)'), + (0x3237, '3', '(祝)'), + (0x3238, '3', '(労)'), + (0x3239, '3', '(代)'), + (0x323A, '3', '(呼)'), + (0x323B, '3', '(学)'), + (0x323C, '3', '(監)'), + (0x323D, '3', '(企)'), + (0x323E, '3', '(資)'), + (0x323F, '3', '(協)'), + (0x3240, '3', '(祭)'), + (0x3241, '3', '(休)'), + (0x3242, '3', '(自)'), + (0x3243, '3', '(至)'), + (0x3244, 'M', '問'), + (0x3245, 'M', '幼'), + (0x3246, 'M', '文'), + ] + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3247, 'M', '箏'), + (0x3248, 'V'), + (0x3250, 'M', 'pte'), + (0x3251, 'M', '21'), + (0x3252, 'M', '22'), + (0x3253, 'M', '23'), + (0x3254, 'M', '24'), + (0x3255, 'M', '25'), + (0x3256, 'M', '26'), + (0x3257, 'M', '27'), + (0x3258, 'M', '28'), + (0x3259, 'M', '29'), + (0x325A, 'M', '30'), + (0x325B, 'M', '31'), + (0x325C, 'M', '32'), + (0x325D, 'M', '33'), + (0x325E, 'M', '34'), + (0x325F, 'M', '35'), + (0x3260, 'M', 'ᄀ'), + (0x3261, 'M', 'ᄂ'), + (0x3262, 'M', 'ᄃ'), + (0x3263, 'M', 'ᄅ'), + (0x3264, 'M', 'ᄆ'), + (0x3265, 'M', 'ᄇ'), + (0x3266, 'M', 'ᄉ'), + (0x3267, 'M', 'ᄋ'), + (0x3268, 'M', 'ᄌ'), + (0x3269, 'M', 'ᄎ'), + (0x326A, 'M', 'ᄏ'), + (0x326B, 'M', 'ᄐ'), + (0x326C, 'M', 'ᄑ'), + (0x326D, 'M', 'ᄒ'), + (0x326E, 'M', '가'), + (0x326F, 'M', '나'), + (0x3270, 'M', '다'), + (0x3271, 'M', '라'), + (0x3272, 'M', '마'), + (0x3273, 'M', '바'), + (0x3274, 'M', '사'), + (0x3275, 'M', '아'), + (0x3276, 'M', '자'), + (0x3277, 'M', '차'), + (0x3278, 'M', '카'), + (0x3279, 'M', '타'), + (0x327A, 'M', '파'), + (0x327B, 'M', '하'), + (0x327C, 'M', '참고'), + (0x327D, 'M', '주의'), + (0x327E, 'M', '우'), + (0x327F, 'V'), + (0x3280, 'M', '一'), + (0x3281, 'M', '二'), + (0x3282, 'M', '三'), + (0x3283, 'M', '四'), + (0x3284, 'M', '五'), + (0x3285, 'M', '六'), + (0x3286, 'M', '七'), + (0x3287, 'M', '八'), + (0x3288, 'M', '九'), + (0x3289, 'M', '十'), + (0x328A, 'M', '月'), + (0x328B, 'M', '火'), + (0x328C, 'M', '水'), + (0x328D, 'M', '木'), 
+ (0x328E, 'M', '金'), + (0x328F, 'M', '土'), + (0x3290, 'M', '日'), + (0x3291, 'M', '株'), + (0x3292, 'M', '有'), + (0x3293, 'M', '社'), + (0x3294, 'M', '名'), + (0x3295, 'M', '特'), + (0x3296, 'M', '財'), + (0x3297, 'M', '祝'), + (0x3298, 'M', '労'), + (0x3299, 'M', '秘'), + (0x329A, 'M', '男'), + (0x329B, 'M', '女'), + (0x329C, 'M', '適'), + (0x329D, 'M', '優'), + (0x329E, 'M', '印'), + (0x329F, 'M', '注'), + (0x32A0, 'M', '項'), + (0x32A1, 'M', '休'), + (0x32A2, 'M', '写'), + (0x32A3, 'M', '正'), + (0x32A4, 'M', '上'), + (0x32A5, 'M', '中'), + (0x32A6, 'M', '下'), + (0x32A7, 'M', '左'), + (0x32A8, 'M', '右'), + (0x32A9, 'M', '医'), + (0x32AA, 'M', '宗'), + (0x32AB, 'M', '学'), + (0x32AC, 'M', '監'), + (0x32AD, 'M', '企'), + (0x32AE, 'M', '資'), + (0x32AF, 'M', '協'), + (0x32B0, 'M', '夜'), + (0x32B1, 'M', '36'), + ] + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x32B2, 'M', '37'), + (0x32B3, 'M', '38'), + (0x32B4, 'M', '39'), + (0x32B5, 'M', '40'), + (0x32B6, 'M', '41'), + (0x32B7, 'M', '42'), + (0x32B8, 'M', '43'), + (0x32B9, 'M', '44'), + (0x32BA, 'M', '45'), + (0x32BB, 'M', '46'), + (0x32BC, 'M', '47'), + (0x32BD, 'M', '48'), + (0x32BE, 'M', '49'), + (0x32BF, 'M', '50'), + (0x32C0, 'M', '1月'), + (0x32C1, 'M', '2月'), + (0x32C2, 'M', '3月'), + (0x32C3, 'M', '4月'), + (0x32C4, 'M', '5月'), + (0x32C5, 'M', '6月'), + (0x32C6, 'M', '7月'), + (0x32C7, 'M', '8月'), + (0x32C8, 'M', '9月'), + (0x32C9, 'M', '10月'), + (0x32CA, 'M', '11月'), + (0x32CB, 'M', '12月'), + (0x32CC, 'M', 'hg'), + (0x32CD, 'M', 'erg'), + (0x32CE, 'M', 'ev'), + (0x32CF, 'M', 'ltd'), + (0x32D0, 'M', 'ア'), + (0x32D1, 'M', 'イ'), + (0x32D2, 'M', 'ウ'), + (0x32D3, 'M', 'エ'), + (0x32D4, 'M', 'オ'), + (0x32D5, 'M', 'カ'), + (0x32D6, 'M', 'キ'), + (0x32D7, 'M', 'ク'), + (0x32D8, 'M', 'ケ'), + (0x32D9, 'M', 'コ'), + (0x32DA, 'M', 'サ'), + (0x32DB, 'M', 'シ'), + (0x32DC, 'M', 'ス'), + (0x32DD, 'M', 'セ'), + (0x32DE, 'M', 'ソ'), + (0x32DF, 'M', 'タ'), + (0x32E0, 'M', 'チ'), + (0x32E1, 'M', 'ツ'), + (0x32E2, 'M', 'テ'), + (0x32E3, 'M', 'ト'), + (0x32E4, 'M', 'ナ'), + (0x32E5, 'M', 'ニ'), + (0x32E6, 'M', 'ヌ'), + (0x32E7, 'M', 'ネ'), + (0x32E8, 'M', 'ノ'), + (0x32E9, 'M', 'ハ'), + (0x32EA, 'M', 'ヒ'), + (0x32EB, 'M', 'フ'), + (0x32EC, 'M', 'ヘ'), + (0x32ED, 'M', 'ホ'), + (0x32EE, 'M', 'マ'), + (0x32EF, 'M', 'ミ'), + (0x32F0, 'M', 'ム'), + (0x32F1, 'M', 'メ'), + (0x32F2, 'M', 'モ'), + (0x32F3, 'M', 'ヤ'), + (0x32F4, 'M', 'ユ'), + (0x32F5, 'M', 'ヨ'), + (0x32F6, 'M', 'ラ'), + (0x32F7, 'M', 'リ'), + (0x32F8, 'M', 'ル'), + (0x32F9, 'M', 'レ'), + (0x32FA, 'M', 'ロ'), + (0x32FB, 'M', 'ワ'), + (0x32FC, 'M', 'ヰ'), + (0x32FD, 'M', 'ヱ'), + (0x32FE, 'M', 'ヲ'), + (0x32FF, 'M', '令和'), + (0x3300, 'M', 'アパート'), + (0x3301, 'M', 'アルファ'), + (0x3302, 'M', 'アンペア'), + (0x3303, 'M', 'アール'), + (0x3304, 'M', 'イニング'), + (0x3305, 'M', 'インチ'), + (0x3306, 'M', 'ウォン'), + (0x3307, 'M', 'エスクード'), + (0x3308, 'M', 'エーカー'), + (0x3309, 'M', 'オンス'), + (0x330A, 'M', 'オーム'), + (0x330B, 'M', 'カイリ'), + (0x330C, 'M', 'カラット'), + (0x330D, 'M', 'カロリー'), + (0x330E, 'M', 'ガロン'), + (0x330F, 'M', 'ガンマ'), + (0x3310, 'M', 'ギガ'), + (0x3311, 'M', 'ギニー'), + (0x3312, 'M', 'キュリー'), + (0x3313, 'M', 'ギルダー'), + (0x3314, 'M', 'キロ'), + (0x3315, 'M', 'キログラム'), + ] + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3316, 'M', 'キロメートル'), + (0x3317, 'M', 'キロワット'), + (0x3318, 'M', 'グラム'), + (0x3319, 'M', 'グラムトン'), + (0x331A, 'M', 'クルゼイロ'), + (0x331B, 'M', 'クローネ'), + (0x331C, 'M', 'ケース'), + (0x331D, 'M', 'コルナ'), + (0x331E, 'M', 'コーポ'), + (0x331F, 'M', 'サイクル'), + (0x3320, 'M', 'サンチーム'), + (0x3321, 'M', 
'シリング'), + (0x3322, 'M', 'センチ'), + (0x3323, 'M', 'セント'), + (0x3324, 'M', 'ダース'), + (0x3325, 'M', 'デシ'), + (0x3326, 'M', 'ドル'), + (0x3327, 'M', 'トン'), + (0x3328, 'M', 'ナノ'), + (0x3329, 'M', 'ノット'), + (0x332A, 'M', 'ハイツ'), + (0x332B, 'M', 'パーセント'), + (0x332C, 'M', 'パーツ'), + (0x332D, 'M', 'バーレル'), + (0x332E, 'M', 'ピアストル'), + (0x332F, 'M', 'ピクル'), + (0x3330, 'M', 'ピコ'), + (0x3331, 'M', 'ビル'), + (0x3332, 'M', 'ファラッド'), + (0x3333, 'M', 'フィート'), + (0x3334, 'M', 'ブッシェル'), + (0x3335, 'M', 'フラン'), + (0x3336, 'M', 'ヘクタール'), + (0x3337, 'M', 'ペソ'), + (0x3338, 'M', 'ペニヒ'), + (0x3339, 'M', 'ヘルツ'), + (0x333A, 'M', 'ペンス'), + (0x333B, 'M', 'ページ'), + (0x333C, 'M', 'ベータ'), + (0x333D, 'M', 'ポイント'), + (0x333E, 'M', 'ボルト'), + (0x333F, 'M', 'ホン'), + (0x3340, 'M', 'ポンド'), + (0x3341, 'M', 'ホール'), + (0x3342, 'M', 'ホーン'), + (0x3343, 'M', 'マイクロ'), + (0x3344, 'M', 'マイル'), + (0x3345, 'M', 'マッハ'), + (0x3346, 'M', 'マルク'), + (0x3347, 'M', 'マンション'), + (0x3348, 'M', 'ミクロン'), + (0x3349, 'M', 'ミリ'), + (0x334A, 'M', 'ミリバール'), + (0x334B, 'M', 'メガ'), + (0x334C, 'M', 'メガトン'), + (0x334D, 'M', 'メートル'), + (0x334E, 'M', 'ヤード'), + (0x334F, 'M', 'ヤール'), + (0x3350, 'M', 'ユアン'), + (0x3351, 'M', 'リットル'), + (0x3352, 'M', 'リラ'), + (0x3353, 'M', 'ルピー'), + (0x3354, 'M', 'ルーブル'), + (0x3355, 'M', 'レム'), + (0x3356, 'M', 'レントゲン'), + (0x3357, 'M', 'ワット'), + (0x3358, 'M', '0点'), + (0x3359, 'M', '1点'), + (0x335A, 'M', '2点'), + (0x335B, 'M', '3点'), + (0x335C, 'M', '4点'), + (0x335D, 'M', '5点'), + (0x335E, 'M', '6点'), + (0x335F, 'M', '7点'), + (0x3360, 'M', '8点'), + (0x3361, 'M', '9点'), + (0x3362, 'M', '10点'), + (0x3363, 'M', '11点'), + (0x3364, 'M', '12点'), + (0x3365, 'M', '13点'), + (0x3366, 'M', '14点'), + (0x3367, 'M', '15点'), + (0x3368, 'M', '16点'), + (0x3369, 'M', '17点'), + (0x336A, 'M', '18点'), + (0x336B, 'M', '19点'), + (0x336C, 'M', '20点'), + (0x336D, 'M', '21点'), + (0x336E, 'M', '22点'), + (0x336F, 'M', '23点'), + (0x3370, 'M', '24点'), + (0x3371, 'M', 'hpa'), + (0x3372, 'M', 'da'), + (0x3373, 'M', 'au'), + (0x3374, 'M', 'bar'), + (0x3375, 'M', 'ov'), + (0x3376, 'M', 'pc'), + (0x3377, 'M', 'dm'), + (0x3378, 'M', 'dm2'), + (0x3379, 'M', 'dm3'), + ] + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x337A, 'M', 'iu'), + (0x337B, 'M', '平成'), + (0x337C, 'M', '昭和'), + (0x337D, 'M', '大正'), + (0x337E, 'M', '明治'), + (0x337F, 'M', '株式会社'), + (0x3380, 'M', 'pa'), + (0x3381, 'M', 'na'), + (0x3382, 'M', 'μa'), + (0x3383, 'M', 'ma'), + (0x3384, 'M', 'ka'), + (0x3385, 'M', 'kb'), + (0x3386, 'M', 'mb'), + (0x3387, 'M', 'gb'), + (0x3388, 'M', 'cal'), + (0x3389, 'M', 'kcal'), + (0x338A, 'M', 'pf'), + (0x338B, 'M', 'nf'), + (0x338C, 'M', 'μf'), + (0x338D, 'M', 'μg'), + (0x338E, 'M', 'mg'), + (0x338F, 'M', 'kg'), + (0x3390, 'M', 'hz'), + (0x3391, 'M', 'khz'), + (0x3392, 'M', 'mhz'), + (0x3393, 'M', 'ghz'), + (0x3394, 'M', 'thz'), + (0x3395, 'M', 'μl'), + (0x3396, 'M', 'ml'), + (0x3397, 'M', 'dl'), + (0x3398, 'M', 'kl'), + (0x3399, 'M', 'fm'), + (0x339A, 'M', 'nm'), + (0x339B, 'M', 'μm'), + (0x339C, 'M', 'mm'), + (0x339D, 'M', 'cm'), + (0x339E, 'M', 'km'), + (0x339F, 'M', 'mm2'), + (0x33A0, 'M', 'cm2'), + (0x33A1, 'M', 'm2'), + (0x33A2, 'M', 'km2'), + (0x33A3, 'M', 'mm3'), + (0x33A4, 'M', 'cm3'), + (0x33A5, 'M', 'm3'), + (0x33A6, 'M', 'km3'), + (0x33A7, 'M', 'm∕s'), + (0x33A8, 'M', 'm∕s2'), + (0x33A9, 'M', 'pa'), + (0x33AA, 'M', 'kpa'), + (0x33AB, 'M', 'mpa'), + (0x33AC, 'M', 'gpa'), + (0x33AD, 'M', 'rad'), + (0x33AE, 'M', 'rad∕s'), + (0x33AF, 'M', 'rad∕s2'), + (0x33B0, 'M', 'ps'), + (0x33B1, 'M', 'ns'), + (0x33B2, 'M', 'μs'), + 
(0x33B3, 'M', 'ms'), + (0x33B4, 'M', 'pv'), + (0x33B5, 'M', 'nv'), + (0x33B6, 'M', 'μv'), + (0x33B7, 'M', 'mv'), + (0x33B8, 'M', 'kv'), + (0x33B9, 'M', 'mv'), + (0x33BA, 'M', 'pw'), + (0x33BB, 'M', 'nw'), + (0x33BC, 'M', 'μw'), + (0x33BD, 'M', 'mw'), + (0x33BE, 'M', 'kw'), + (0x33BF, 'M', 'mw'), + (0x33C0, 'M', 'kω'), + (0x33C1, 'M', 'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', 'bq'), + (0x33C4, 'M', 'cc'), + (0x33C5, 'M', 'cd'), + (0x33C6, 'M', 'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', 'db'), + (0x33C9, 'M', 'gy'), + (0x33CA, 'M', 'ha'), + (0x33CB, 'M', 'hp'), + (0x33CC, 'M', 'in'), + (0x33CD, 'M', 'kk'), + (0x33CE, 'M', 'km'), + (0x33CF, 'M', 'kt'), + (0x33D0, 'M', 'lm'), + (0x33D1, 'M', 'ln'), + (0x33D2, 'M', 'log'), + (0x33D3, 'M', 'lx'), + (0x33D4, 'M', 'mb'), + (0x33D5, 'M', 'mil'), + (0x33D6, 'M', 'mol'), + (0x33D7, 'M', 'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', 'ppm'), + (0x33DA, 'M', 'pr'), + (0x33DB, 'M', 'sr'), + (0x33DC, 'M', 'sv'), + (0x33DD, 'M', 'wb'), + ] + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x33DE, 'M', 'v∕m'), + (0x33DF, 'M', 'a∕m'), + (0x33E0, 'M', '1日'), + (0x33E1, 'M', '2日'), + (0x33E2, 'M', '3日'), + (0x33E3, 'M', '4日'), + (0x33E4, 'M', '5日'), + (0x33E5, 'M', '6日'), + (0x33E6, 'M', '7日'), + (0x33E7, 'M', '8日'), + (0x33E8, 'M', '9日'), + (0x33E9, 'M', '10日'), + (0x33EA, 'M', '11日'), + (0x33EB, 'M', '12日'), + (0x33EC, 'M', '13日'), + (0x33ED, 'M', '14日'), + (0x33EE, 'M', '15日'), + (0x33EF, 'M', '16日'), + (0x33F0, 'M', '17日'), + (0x33F1, 'M', '18日'), + (0x33F2, 'M', '19日'), + (0x33F3, 'M', '20日'), + (0x33F4, 'M', '21日'), + (0x33F5, 'M', '22日'), + (0x33F6, 'M', '23日'), + (0x33F7, 'M', '24日'), + (0x33F8, 'M', '25日'), + (0x33F9, 'M', '26日'), + (0x33FA, 'M', '27日'), + (0x33FB, 'M', '28日'), + (0x33FC, 'M', '29日'), + (0x33FD, 'M', '30日'), + (0x33FE, 'M', '31日'), + (0x33FF, 'M', 'gal'), + (0x3400, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', 'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', 'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', 'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', 'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', 'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', 'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', 'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', 'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', 'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', 'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', 'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', 'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', 'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', 'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', 'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', 'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', 'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', 'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', 'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', 'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', 'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', 'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', 'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', 'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', 'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', 'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', 'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', 'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', 'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', 'ꚍ'), + (0xA68D, 'V'), + ] + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA68E, 'M', 'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', 'ꚑ'), + (0xA691, 'V'), + (0xA692, 'M', 'ꚓ'), + (0xA693, 'V'), + (0xA694, 'M', 'ꚕ'), + (0xA695, 'V'), + (0xA696, 'M', 'ꚗ'), + (0xA697, 'V'), + (0xA698, 'M', 'ꚙ'), + (0xA699, 'V'), + (0xA69A, 'M', 'ꚛ'), + (0xA69B, 'V'), + (0xA69C, 'M', 'ъ'), + (0xA69D, 'M', 'ь'), + 
(0xA69E, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', 'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', 'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', 'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', 'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', 'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', 'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', 'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', 'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', 'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', 'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', 'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', 'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', 'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', 'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', 'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', 'ꝃ'), + (0xA743, 'V'), + (0xA744, 'M', 'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', 'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', 'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', 'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', 'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', 'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', 'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', 'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', 'ꝕ'), + (0xA755, 'V'), + (0xA756, 'M', 'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', 'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', 'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', 'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', 'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', 'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', 'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', 'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', 'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', 'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', 'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', 'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', 'ꝯ'), + (0xA76F, 'V'), + (0xA770, 'M', 'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', 'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', 'ꝼ'), + ] + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA77C, 'V'), + (0xA77D, 'M', 'ᵹ'), + (0xA77E, 'M', 'ꝿ'), + (0xA77F, 'V'), + (0xA780, 'M', 'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', 'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', 'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', 'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', 'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', 'ɥ'), + (0xA78E, 'V'), + (0xA790, 'M', 'ꞑ'), + (0xA791, 'V'), + (0xA792, 'M', 'ꞓ'), + (0xA793, 'V'), + (0xA796, 'M', 'ꞗ'), + (0xA797, 'V'), + (0xA798, 'M', 'ꞙ'), + (0xA799, 'V'), + (0xA79A, 'M', 'ꞛ'), + (0xA79B, 'V'), + (0xA79C, 'M', 'ꞝ'), + (0xA79D, 'V'), + (0xA79E, 'M', 'ꞟ'), + (0xA79F, 'V'), + (0xA7A0, 'M', 'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', 'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', 'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', 'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', 'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', 'ɦ'), + (0xA7AB, 'M', 'ɜ'), + (0xA7AC, 'M', 'ɡ'), + (0xA7AD, 'M', 'ɬ'), + (0xA7AE, 'M', 'ɪ'), + (0xA7AF, 'V'), + (0xA7B0, 'M', 'ʞ'), + (0xA7B1, 'M', 'ʇ'), + (0xA7B2, 'M', 'ʝ'), + (0xA7B3, 'M', 'ꭓ'), + (0xA7B4, 'M', 'ꞵ'), + (0xA7B5, 'V'), + (0xA7B6, 'M', 'ꞷ'), + (0xA7B7, 'V'), + (0xA7B8, 'M', 'ꞹ'), + (0xA7B9, 'V'), + (0xA7BA, 'M', 'ꞻ'), + (0xA7BB, 'V'), + (0xA7BC, 'M', 'ꞽ'), + (0xA7BD, 'V'), + (0xA7BE, 'M', 'ꞿ'), + (0xA7BF, 'V'), + (0xA7C0, 'M', 'ꟁ'), + (0xA7C1, 'V'), + (0xA7C2, 'M', 'ꟃ'), + (0xA7C3, 'V'), + (0xA7C4, 'M', 'ꞔ'), + (0xA7C5, 'M', 'ʂ'), + (0xA7C6, 'M', 'ᶎ'), + (0xA7C7, 'M', 'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', 'ꟊ'), + (0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7D0, 'M', 'ꟑ'), + (0xA7D1, 'V'), + (0xA7D2, 'X'), + (0xA7D3, 'V'), + (0xA7D4, 'X'), + (0xA7D5, 'V'), + (0xA7D6, 'M', 'ꟗ'), + (0xA7D7, 'V'), + (0xA7D8, 'M', 'ꟙ'), + (0xA7D9, 'V'), + (0xA7DA, 'X'), + (0xA7F2, 'M', 'c'), + (0xA7F3, 'M', 'f'), + (0xA7F4, 'M', 'q'), + (0xA7F5, 'M', 'ꟶ'), + (0xA7F6, 'V'), + (0xA7F8, 'M', 'ħ'), + (0xA7F9, 'M', 'œ'), + (0xA7FA, 'V'), 
+ (0xA82D, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C6, 'X'), + ] + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9FF, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', 'ꜧ'), + (0xAB5D, 'M', 'ꬷ'), + (0xAB5E, 'M', 'ɫ'), + (0xAB5F, 'M', 'ꭒ'), + (0xAB60, 'V'), + (0xAB69, 'M', 'ʍ'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), + (0xAB70, 'M', 'Ꭰ'), + (0xAB71, 'M', 'Ꭱ'), + (0xAB72, 'M', 'Ꭲ'), + (0xAB73, 'M', 'Ꭳ'), + (0xAB74, 'M', 'Ꭴ'), + (0xAB75, 'M', 'Ꭵ'), + (0xAB76, 'M', 'Ꭶ'), + (0xAB77, 'M', 'Ꭷ'), + (0xAB78, 'M', 'Ꭸ'), + (0xAB79, 'M', 'Ꭹ'), + (0xAB7A, 'M', 'Ꭺ'), + (0xAB7B, 'M', 'Ꭻ'), + (0xAB7C, 'M', 'Ꭼ'), + (0xAB7D, 'M', 'Ꭽ'), + (0xAB7E, 'M', 'Ꭾ'), + (0xAB7F, 'M', 'Ꭿ'), + (0xAB80, 'M', 'Ꮀ'), + (0xAB81, 'M', 'Ꮁ'), + (0xAB82, 'M', 'Ꮂ'), + (0xAB83, 'M', 'Ꮃ'), + (0xAB84, 'M', 'Ꮄ'), + (0xAB85, 'M', 'Ꮅ'), + (0xAB86, 'M', 'Ꮆ'), + (0xAB87, 'M', 'Ꮇ'), + (0xAB88, 'M', 'Ꮈ'), + (0xAB89, 'M', 'Ꮉ'), + (0xAB8A, 'M', 'Ꮊ'), + (0xAB8B, 'M', 'Ꮋ'), + (0xAB8C, 'M', 'Ꮌ'), + (0xAB8D, 'M', 'Ꮍ'), + (0xAB8E, 'M', 'Ꮎ'), + (0xAB8F, 'M', 'Ꮏ'), + (0xAB90, 'M', 'Ꮐ'), + (0xAB91, 'M', 'Ꮑ'), + (0xAB92, 'M', 'Ꮒ'), + (0xAB93, 'M', 'Ꮓ'), + (0xAB94, 'M', 'Ꮔ'), + (0xAB95, 'M', 'Ꮕ'), + (0xAB96, 'M', 'Ꮖ'), + (0xAB97, 'M', 'Ꮗ'), + (0xAB98, 'M', 'Ꮘ'), + (0xAB99, 'M', 'Ꮙ'), + (0xAB9A, 'M', 'Ꮚ'), + (0xAB9B, 'M', 'Ꮛ'), + (0xAB9C, 'M', 'Ꮜ'), + (0xAB9D, 'M', 'Ꮝ'), + (0xAB9E, 'M', 'Ꮞ'), + (0xAB9F, 'M', 'Ꮟ'), + (0xABA0, 'M', 'Ꮠ'), + (0xABA1, 'M', 'Ꮡ'), + (0xABA2, 'M', 'Ꮢ'), + (0xABA3, 'M', 'Ꮣ'), + (0xABA4, 'M', 'Ꮤ'), + (0xABA5, 'M', 'Ꮥ'), + (0xABA6, 'M', 'Ꮦ'), + (0xABA7, 'M', 'Ꮧ'), + (0xABA8, 'M', 'Ꮨ'), + (0xABA9, 'M', 'Ꮩ'), + (0xABAA, 'M', 'Ꮪ'), + ] + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xABAB, 'M', 'Ꮫ'), + (0xABAC, 'M', 'Ꮬ'), + (0xABAD, 'M', 'Ꮭ'), + (0xABAE, 'M', 'Ꮮ'), + (0xABAF, 'M', 'Ꮯ'), + (0xABB0, 'M', 'Ꮰ'), + (0xABB1, 'M', 'Ꮱ'), + (0xABB2, 'M', 'Ꮲ'), + (0xABB3, 'M', 'Ꮳ'), + (0xABB4, 'M', 'Ꮴ'), + (0xABB5, 'M', 'Ꮵ'), + (0xABB6, 'M', 'Ꮶ'), + (0xABB7, 'M', 'Ꮷ'), + (0xABB8, 'M', 'Ꮸ'), + (0xABB9, 'M', 'Ꮹ'), + (0xABBA, 'M', 'Ꮺ'), + (0xABBB, 'M', 'Ꮻ'), + (0xABBC, 'M', 'Ꮼ'), + (0xABBD, 'M', 'Ꮽ'), + (0xABBE, 'M', 'Ꮾ'), + (0xABBF, 'M', 'Ꮿ'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', '豈'), + (0xF901, 'M', '更'), + (0xF902, 'M', '車'), + (0xF903, 'M', '賈'), + (0xF904, 'M', '滑'), + (0xF905, 'M', '串'), + (0xF906, 'M', '句'), + (0xF907, 'M', '龜'), + (0xF909, 'M', '契'), + (0xF90A, 'M', '金'), + (0xF90B, 'M', '喇'), + (0xF90C, 'M', '奈'), + (0xF90D, 'M', '懶'), + (0xF90E, 'M', '癩'), + (0xF90F, 'M', '羅'), + (0xF910, 'M', '蘿'), + (0xF911, 'M', '螺'), + (0xF912, 'M', '裸'), + (0xF913, 'M', '邏'), + (0xF914, 'M', '樂'), + (0xF915, 'M', '洛'), + (0xF916, 'M', '烙'), + (0xF917, 'M', '珞'), + (0xF918, 'M', '落'), + (0xF919, 'M', '酪'), + (0xF91A, 'M', '駱'), + (0xF91B, 'M', '亂'), + (0xF91C, 'M', 
'卵'), + (0xF91D, 'M', '欄'), + (0xF91E, 'M', '爛'), + (0xF91F, 'M', '蘭'), + (0xF920, 'M', '鸞'), + (0xF921, 'M', '嵐'), + (0xF922, 'M', '濫'), + (0xF923, 'M', '藍'), + (0xF924, 'M', '襤'), + (0xF925, 'M', '拉'), + (0xF926, 'M', '臘'), + (0xF927, 'M', '蠟'), + (0xF928, 'M', '廊'), + (0xF929, 'M', '朗'), + (0xF92A, 'M', '浪'), + (0xF92B, 'M', '狼'), + (0xF92C, 'M', '郎'), + (0xF92D, 'M', '來'), + (0xF92E, 'M', '冷'), + (0xF92F, 'M', '勞'), + (0xF930, 'M', '擄'), + (0xF931, 'M', '櫓'), + (0xF932, 'M', '爐'), + (0xF933, 'M', '盧'), + (0xF934, 'M', '老'), + (0xF935, 'M', '蘆'), + (0xF936, 'M', '虜'), + (0xF937, 'M', '路'), + (0xF938, 'M', '露'), + (0xF939, 'M', '魯'), + (0xF93A, 'M', '鷺'), + (0xF93B, 'M', '碌'), + (0xF93C, 'M', '祿'), + (0xF93D, 'M', '綠'), + (0xF93E, 'M', '菉'), + (0xF93F, 'M', '錄'), + (0xF940, 'M', '鹿'), + (0xF941, 'M', '論'), + (0xF942, 'M', '壟'), + (0xF943, 'M', '弄'), + (0xF944, 'M', '籠'), + (0xF945, 'M', '聾'), + ] + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF946, 'M', '牢'), + (0xF947, 'M', '磊'), + (0xF948, 'M', '賂'), + (0xF949, 'M', '雷'), + (0xF94A, 'M', '壘'), + (0xF94B, 'M', '屢'), + (0xF94C, 'M', '樓'), + (0xF94D, 'M', '淚'), + (0xF94E, 'M', '漏'), + (0xF94F, 'M', '累'), + (0xF950, 'M', '縷'), + (0xF951, 'M', '陋'), + (0xF952, 'M', '勒'), + (0xF953, 'M', '肋'), + (0xF954, 'M', '凜'), + (0xF955, 'M', '凌'), + (0xF956, 'M', '稜'), + (0xF957, 'M', '綾'), + (0xF958, 'M', '菱'), + (0xF959, 'M', '陵'), + (0xF95A, 'M', '讀'), + (0xF95B, 'M', '拏'), + (0xF95C, 'M', '樂'), + (0xF95D, 'M', '諾'), + (0xF95E, 'M', '丹'), + (0xF95F, 'M', '寧'), + (0xF960, 'M', '怒'), + (0xF961, 'M', '率'), + (0xF962, 'M', '異'), + (0xF963, 'M', '北'), + (0xF964, 'M', '磻'), + (0xF965, 'M', '便'), + (0xF966, 'M', '復'), + (0xF967, 'M', '不'), + (0xF968, 'M', '泌'), + (0xF969, 'M', '數'), + (0xF96A, 'M', '索'), + (0xF96B, 'M', '參'), + (0xF96C, 'M', '塞'), + (0xF96D, 'M', '省'), + (0xF96E, 'M', '葉'), + (0xF96F, 'M', '說'), + (0xF970, 'M', '殺'), + (0xF971, 'M', '辰'), + (0xF972, 'M', '沈'), + (0xF973, 'M', '拾'), + (0xF974, 'M', '若'), + (0xF975, 'M', '掠'), + (0xF976, 'M', '略'), + (0xF977, 'M', '亮'), + (0xF978, 'M', '兩'), + (0xF979, 'M', '凉'), + (0xF97A, 'M', '梁'), + (0xF97B, 'M', '糧'), + (0xF97C, 'M', '良'), + (0xF97D, 'M', '諒'), + (0xF97E, 'M', '量'), + (0xF97F, 'M', '勵'), + (0xF980, 'M', '呂'), + (0xF981, 'M', '女'), + (0xF982, 'M', '廬'), + (0xF983, 'M', '旅'), + (0xF984, 'M', '濾'), + (0xF985, 'M', '礪'), + (0xF986, 'M', '閭'), + (0xF987, 'M', '驪'), + (0xF988, 'M', '麗'), + (0xF989, 'M', '黎'), + (0xF98A, 'M', '力'), + (0xF98B, 'M', '曆'), + (0xF98C, 'M', '歷'), + (0xF98D, 'M', '轢'), + (0xF98E, 'M', '年'), + (0xF98F, 'M', '憐'), + (0xF990, 'M', '戀'), + (0xF991, 'M', '撚'), + (0xF992, 'M', '漣'), + (0xF993, 'M', '煉'), + (0xF994, 'M', '璉'), + (0xF995, 'M', '秊'), + (0xF996, 'M', '練'), + (0xF997, 'M', '聯'), + (0xF998, 'M', '輦'), + (0xF999, 'M', '蓮'), + (0xF99A, 'M', '連'), + (0xF99B, 'M', '鍊'), + (0xF99C, 'M', '列'), + (0xF99D, 'M', '劣'), + (0xF99E, 'M', '咽'), + (0xF99F, 'M', '烈'), + (0xF9A0, 'M', '裂'), + (0xF9A1, 'M', '說'), + (0xF9A2, 'M', '廉'), + (0xF9A3, 'M', '念'), + (0xF9A4, 'M', '捻'), + (0xF9A5, 'M', '殮'), + (0xF9A6, 'M', '簾'), + (0xF9A7, 'M', '獵'), + (0xF9A8, 'M', '令'), + (0xF9A9, 'M', '囹'), + ] + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF9AA, 'M', '寧'), + (0xF9AB, 'M', '嶺'), + (0xF9AC, 'M', '怜'), + (0xF9AD, 'M', '玲'), + (0xF9AE, 'M', '瑩'), + (0xF9AF, 'M', '羚'), + (0xF9B0, 'M', '聆'), + (0xF9B1, 'M', '鈴'), + (0xF9B2, 'M', '零'), + (0xF9B3, 'M', '靈'), + (0xF9B4, 'M', '領'), + (0xF9B5, 'M', '例'), + 
(0xF9B6, 'M', '禮'), + (0xF9B7, 'M', '醴'), + (0xF9B8, 'M', '隸'), + (0xF9B9, 'M', '惡'), + (0xF9BA, 'M', '了'), + (0xF9BB, 'M', '僚'), + (0xF9BC, 'M', '寮'), + (0xF9BD, 'M', '尿'), + (0xF9BE, 'M', '料'), + (0xF9BF, 'M', '樂'), + (0xF9C0, 'M', '燎'), + (0xF9C1, 'M', '療'), + (0xF9C2, 'M', '蓼'), + (0xF9C3, 'M', '遼'), + (0xF9C4, 'M', '龍'), + (0xF9C5, 'M', '暈'), + (0xF9C6, 'M', '阮'), + (0xF9C7, 'M', '劉'), + (0xF9C8, 'M', '杻'), + (0xF9C9, 'M', '柳'), + (0xF9CA, 'M', '流'), + (0xF9CB, 'M', '溜'), + (0xF9CC, 'M', '琉'), + (0xF9CD, 'M', '留'), + (0xF9CE, 'M', '硫'), + (0xF9CF, 'M', '紐'), + (0xF9D0, 'M', '類'), + (0xF9D1, 'M', '六'), + (0xF9D2, 'M', '戮'), + (0xF9D3, 'M', '陸'), + (0xF9D4, 'M', '倫'), + (0xF9D5, 'M', '崙'), + (0xF9D6, 'M', '淪'), + (0xF9D7, 'M', '輪'), + (0xF9D8, 'M', '律'), + (0xF9D9, 'M', '慄'), + (0xF9DA, 'M', '栗'), + (0xF9DB, 'M', '率'), + (0xF9DC, 'M', '隆'), + (0xF9DD, 'M', '利'), + (0xF9DE, 'M', '吏'), + (0xF9DF, 'M', '履'), + (0xF9E0, 'M', '易'), + (0xF9E1, 'M', '李'), + (0xF9E2, 'M', '梨'), + (0xF9E3, 'M', '泥'), + (0xF9E4, 'M', '理'), + (0xF9E5, 'M', '痢'), + (0xF9E6, 'M', '罹'), + (0xF9E7, 'M', '裏'), + (0xF9E8, 'M', '裡'), + (0xF9E9, 'M', '里'), + (0xF9EA, 'M', '離'), + (0xF9EB, 'M', '匿'), + (0xF9EC, 'M', '溺'), + (0xF9ED, 'M', '吝'), + (0xF9EE, 'M', '燐'), + (0xF9EF, 'M', '璘'), + (0xF9F0, 'M', '藺'), + (0xF9F1, 'M', '隣'), + (0xF9F2, 'M', '鱗'), + (0xF9F3, 'M', '麟'), + (0xF9F4, 'M', '林'), + (0xF9F5, 'M', '淋'), + (0xF9F6, 'M', '臨'), + (0xF9F7, 'M', '立'), + (0xF9F8, 'M', '笠'), + (0xF9F9, 'M', '粒'), + (0xF9FA, 'M', '狀'), + (0xF9FB, 'M', '炙'), + (0xF9FC, 'M', '識'), + (0xF9FD, 'M', '什'), + (0xF9FE, 'M', '茶'), + (0xF9FF, 'M', '刺'), + (0xFA00, 'M', '切'), + (0xFA01, 'M', '度'), + (0xFA02, 'M', '拓'), + (0xFA03, 'M', '糖'), + (0xFA04, 'M', '宅'), + (0xFA05, 'M', '洞'), + (0xFA06, 'M', '暴'), + (0xFA07, 'M', '輻'), + (0xFA08, 'M', '行'), + (0xFA09, 'M', '降'), + (0xFA0A, 'M', '見'), + (0xFA0B, 'M', '廓'), + (0xFA0C, 'M', '兀'), + (0xFA0D, 'M', '嗀'), + ] + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA0E, 'V'), + (0xFA10, 'M', '塚'), + (0xFA11, 'V'), + (0xFA12, 'M', '晴'), + (0xFA13, 'V'), + (0xFA15, 'M', '凞'), + (0xFA16, 'M', '猪'), + (0xFA17, 'M', '益'), + (0xFA18, 'M', '礼'), + (0xFA19, 'M', '神'), + (0xFA1A, 'M', '祥'), + (0xFA1B, 'M', '福'), + (0xFA1C, 'M', '靖'), + (0xFA1D, 'M', '精'), + (0xFA1E, 'M', '羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', '蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', '諸'), + (0xFA23, 'V'), + (0xFA25, 'M', '逸'), + (0xFA26, 'M', '都'), + (0xFA27, 'V'), + (0xFA2A, 'M', '飯'), + (0xFA2B, 'M', '飼'), + (0xFA2C, 'M', '館'), + (0xFA2D, 'M', '鶴'), + (0xFA2E, 'M', '郞'), + (0xFA2F, 'M', '隷'), + (0xFA30, 'M', '侮'), + (0xFA31, 'M', '僧'), + (0xFA32, 'M', '免'), + (0xFA33, 'M', '勉'), + (0xFA34, 'M', '勤'), + (0xFA35, 'M', '卑'), + (0xFA36, 'M', '喝'), + (0xFA37, 'M', '嘆'), + (0xFA38, 'M', '器'), + (0xFA39, 'M', '塀'), + (0xFA3A, 'M', '墨'), + (0xFA3B, 'M', '層'), + (0xFA3C, 'M', '屮'), + (0xFA3D, 'M', '悔'), + (0xFA3E, 'M', '慨'), + (0xFA3F, 'M', '憎'), + (0xFA40, 'M', '懲'), + (0xFA41, 'M', '敏'), + (0xFA42, 'M', '既'), + (0xFA43, 'M', '暑'), + (0xFA44, 'M', '梅'), + (0xFA45, 'M', '海'), + (0xFA46, 'M', '渚'), + (0xFA47, 'M', '漢'), + (0xFA48, 'M', '煮'), + (0xFA49, 'M', '爫'), + (0xFA4A, 'M', '琢'), + (0xFA4B, 'M', '碑'), + (0xFA4C, 'M', '社'), + (0xFA4D, 'M', '祉'), + (0xFA4E, 'M', '祈'), + (0xFA4F, 'M', '祐'), + (0xFA50, 'M', '祖'), + (0xFA51, 'M', '祝'), + (0xFA52, 'M', '禍'), + (0xFA53, 'M', '禎'), + (0xFA54, 'M', '穀'), + (0xFA55, 'M', '突'), + (0xFA56, 'M', '節'), + (0xFA57, 'M', '練'), + (0xFA58, 'M', '縉'), + (0xFA59, 'M', '繁'), + 
(0xFA5A, 'M', '署'), + (0xFA5B, 'M', '者'), + (0xFA5C, 'M', '臭'), + (0xFA5D, 'M', '艹'), + (0xFA5F, 'M', '著'), + (0xFA60, 'M', '褐'), + (0xFA61, 'M', '視'), + (0xFA62, 'M', '謁'), + (0xFA63, 'M', '謹'), + (0xFA64, 'M', '賓'), + (0xFA65, 'M', '贈'), + (0xFA66, 'M', '辶'), + (0xFA67, 'M', '逸'), + (0xFA68, 'M', '難'), + (0xFA69, 'M', '響'), + (0xFA6A, 'M', '頻'), + (0xFA6B, 'M', '恵'), + (0xFA6C, 'M', '𤋮'), + (0xFA6D, 'M', '舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', '並'), + (0xFA71, 'M', '况'), + (0xFA72, 'M', '全'), + (0xFA73, 'M', '侀'), + (0xFA74, 'M', '充'), + (0xFA75, 'M', '冀'), + (0xFA76, 'M', '勇'), + (0xFA77, 'M', '勺'), + (0xFA78, 'M', '喝'), + ] + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA79, 'M', '啕'), + (0xFA7A, 'M', '喙'), + (0xFA7B, 'M', '嗢'), + (0xFA7C, 'M', '塚'), + (0xFA7D, 'M', '墳'), + (0xFA7E, 'M', '奄'), + (0xFA7F, 'M', '奔'), + (0xFA80, 'M', '婢'), + (0xFA81, 'M', '嬨'), + (0xFA82, 'M', '廒'), + (0xFA83, 'M', '廙'), + (0xFA84, 'M', '彩'), + (0xFA85, 'M', '徭'), + (0xFA86, 'M', '惘'), + (0xFA87, 'M', '慎'), + (0xFA88, 'M', '愈'), + (0xFA89, 'M', '憎'), + (0xFA8A, 'M', '慠'), + (0xFA8B, 'M', '懲'), + (0xFA8C, 'M', '戴'), + (0xFA8D, 'M', '揄'), + (0xFA8E, 'M', '搜'), + (0xFA8F, 'M', '摒'), + (0xFA90, 'M', '敖'), + (0xFA91, 'M', '晴'), + (0xFA92, 'M', '朗'), + (0xFA93, 'M', '望'), + (0xFA94, 'M', '杖'), + (0xFA95, 'M', '歹'), + (0xFA96, 'M', '殺'), + (0xFA97, 'M', '流'), + (0xFA98, 'M', '滛'), + (0xFA99, 'M', '滋'), + (0xFA9A, 'M', '漢'), + (0xFA9B, 'M', '瀞'), + (0xFA9C, 'M', '煮'), + (0xFA9D, 'M', '瞧'), + (0xFA9E, 'M', '爵'), + (0xFA9F, 'M', '犯'), + (0xFAA0, 'M', '猪'), + (0xFAA1, 'M', '瑱'), + (0xFAA2, 'M', '甆'), + (0xFAA3, 'M', '画'), + (0xFAA4, 'M', '瘝'), + (0xFAA5, 'M', '瘟'), + (0xFAA6, 'M', '益'), + (0xFAA7, 'M', '盛'), + (0xFAA8, 'M', '直'), + (0xFAA9, 'M', '睊'), + (0xFAAA, 'M', '着'), + (0xFAAB, 'M', '磌'), + (0xFAAC, 'M', '窱'), + (0xFAAD, 'M', '節'), + (0xFAAE, 'M', '类'), + (0xFAAF, 'M', '絛'), + (0xFAB0, 'M', '練'), + (0xFAB1, 'M', '缾'), + (0xFAB2, 'M', '者'), + (0xFAB3, 'M', '荒'), + (0xFAB4, 'M', '華'), + (0xFAB5, 'M', '蝹'), + (0xFAB6, 'M', '襁'), + (0xFAB7, 'M', '覆'), + (0xFAB8, 'M', '視'), + (0xFAB9, 'M', '調'), + (0xFABA, 'M', '諸'), + (0xFABB, 'M', '請'), + (0xFABC, 'M', '謁'), + (0xFABD, 'M', '諾'), + (0xFABE, 'M', '諭'), + (0xFABF, 'M', '謹'), + (0xFAC0, 'M', '變'), + (0xFAC1, 'M', '贈'), + (0xFAC2, 'M', '輸'), + (0xFAC3, 'M', '遲'), + (0xFAC4, 'M', '醙'), + (0xFAC5, 'M', '鉶'), + (0xFAC6, 'M', '陼'), + (0xFAC7, 'M', '難'), + (0xFAC8, 'M', '靖'), + (0xFAC9, 'M', '韛'), + (0xFACA, 'M', '響'), + (0xFACB, 'M', '頋'), + (0xFACC, 'M', '頻'), + (0xFACD, 'M', '鬒'), + (0xFACE, 'M', '龜'), + (0xFACF, 'M', '𢡊'), + (0xFAD0, 'M', '𢡄'), + (0xFAD1, 'M', '𣏕'), + (0xFAD2, 'M', '㮝'), + (0xFAD3, 'M', '䀘'), + (0xFAD4, 'M', '䀹'), + (0xFAD5, 'M', '𥉉'), + (0xFAD6, 'M', '𥳐'), + (0xFAD7, 'M', '𧻓'), + (0xFAD8, 'M', '齃'), + (0xFAD9, 'M', '龎'), + (0xFADA, 'X'), + (0xFB00, 'M', 'ff'), + (0xFB01, 'M', 'fi'), + ] + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFB02, 'M', 'fl'), + (0xFB03, 'M', 'ffi'), + (0xFB04, 'M', 'ffl'), + (0xFB05, 'M', 'st'), + (0xFB07, 'X'), + (0xFB13, 'M', 'մն'), + (0xFB14, 'M', 'մե'), + (0xFB15, 'M', 'մի'), + (0xFB16, 'M', 'վն'), + (0xFB17, 'M', 'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', 'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', 'ײַ'), + (0xFB20, 'M', 'ע'), + (0xFB21, 'M', 'א'), + (0xFB22, 'M', 'ד'), + (0xFB23, 'M', 'ה'), + (0xFB24, 'M', 'כ'), + (0xFB25, 'M', 'ל'), + (0xFB26, 'M', 'ם'), + (0xFB27, 'M', 'ר'), + (0xFB28, 'M', 'ת'), + (0xFB29, '3', '+'), + (0xFB2A, 'M', 'שׁ'), + 
(0xFB2B, 'M', 'שׂ'), + (0xFB2C, 'M', 'שּׁ'), + (0xFB2D, 'M', 'שּׂ'), + (0xFB2E, 'M', 'אַ'), + (0xFB2F, 'M', 'אָ'), + (0xFB30, 'M', 'אּ'), + (0xFB31, 'M', 'בּ'), + (0xFB32, 'M', 'גּ'), + (0xFB33, 'M', 'דּ'), + (0xFB34, 'M', 'הּ'), + (0xFB35, 'M', 'וּ'), + (0xFB36, 'M', 'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', 'טּ'), + (0xFB39, 'M', 'יּ'), + (0xFB3A, 'M', 'ךּ'), + (0xFB3B, 'M', 'כּ'), + (0xFB3C, 'M', 'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', 'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', 'נּ'), + (0xFB41, 'M', 'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', 'ףּ'), + (0xFB44, 'M', 'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', 'צּ'), + (0xFB47, 'M', 'קּ'), + (0xFB48, 'M', 'רּ'), + (0xFB49, 'M', 'שּ'), + (0xFB4A, 'M', 'תּ'), + (0xFB4B, 'M', 'וֹ'), + (0xFB4C, 'M', 'בֿ'), + (0xFB4D, 'M', 'כֿ'), + (0xFB4E, 'M', 'פֿ'), + (0xFB4F, 'M', 'אל'), + (0xFB50, 'M', 'ٱ'), + (0xFB52, 'M', 'ٻ'), + (0xFB56, 'M', 'پ'), + (0xFB5A, 'M', 'ڀ'), + (0xFB5E, 'M', 'ٺ'), + (0xFB62, 'M', 'ٿ'), + (0xFB66, 'M', 'ٹ'), + (0xFB6A, 'M', 'ڤ'), + (0xFB6E, 'M', 'ڦ'), + (0xFB72, 'M', 'ڄ'), + (0xFB76, 'M', 'ڃ'), + (0xFB7A, 'M', 'چ'), + (0xFB7E, 'M', 'ڇ'), + (0xFB82, 'M', 'ڍ'), + (0xFB84, 'M', 'ڌ'), + (0xFB86, 'M', 'ڎ'), + (0xFB88, 'M', 'ڈ'), + (0xFB8A, 'M', 'ژ'), + (0xFB8C, 'M', 'ڑ'), + (0xFB8E, 'M', 'ک'), + (0xFB92, 'M', 'گ'), + (0xFB96, 'M', 'ڳ'), + (0xFB9A, 'M', 'ڱ'), + (0xFB9E, 'M', 'ں'), + (0xFBA0, 'M', 'ڻ'), + (0xFBA4, 'M', 'ۀ'), + (0xFBA6, 'M', 'ہ'), + (0xFBAA, 'M', 'ھ'), + (0xFBAE, 'M', 'ے'), + (0xFBB0, 'M', 'ۓ'), + (0xFBB2, 'V'), + (0xFBC3, 'X'), + (0xFBD3, 'M', 'ڭ'), + (0xFBD7, 'M', 'ۇ'), + (0xFBD9, 'M', 'ۆ'), + (0xFBDB, 'M', 'ۈ'), + (0xFBDD, 'M', 'ۇٴ'), + (0xFBDE, 'M', 'ۋ'), + ] + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFBE0, 'M', 'ۅ'), + (0xFBE2, 'M', 'ۉ'), + (0xFBE4, 'M', 'ې'), + (0xFBE8, 'M', 'ى'), + (0xFBEA, 'M', 'ئا'), + (0xFBEC, 'M', 'ئە'), + (0xFBEE, 'M', 'ئو'), + (0xFBF0, 'M', 'ئۇ'), + (0xFBF2, 'M', 'ئۆ'), + (0xFBF4, 'M', 'ئۈ'), + (0xFBF6, 'M', 'ئې'), + (0xFBF9, 'M', 'ئى'), + (0xFBFC, 'M', 'ی'), + (0xFC00, 'M', 'ئج'), + (0xFC01, 'M', 'ئح'), + (0xFC02, 'M', 'ئم'), + (0xFC03, 'M', 'ئى'), + (0xFC04, 'M', 'ئي'), + (0xFC05, 'M', 'بج'), + (0xFC06, 'M', 'بح'), + (0xFC07, 'M', 'بخ'), + (0xFC08, 'M', 'بم'), + (0xFC09, 'M', 'بى'), + (0xFC0A, 'M', 'بي'), + (0xFC0B, 'M', 'تج'), + (0xFC0C, 'M', 'تح'), + (0xFC0D, 'M', 'تخ'), + (0xFC0E, 'M', 'تم'), + (0xFC0F, 'M', 'تى'), + (0xFC10, 'M', 'تي'), + (0xFC11, 'M', 'ثج'), + (0xFC12, 'M', 'ثم'), + (0xFC13, 'M', 'ثى'), + (0xFC14, 'M', 'ثي'), + (0xFC15, 'M', 'جح'), + (0xFC16, 'M', 'جم'), + (0xFC17, 'M', 'حج'), + (0xFC18, 'M', 'حم'), + (0xFC19, 'M', 'خج'), + (0xFC1A, 'M', 'خح'), + (0xFC1B, 'M', 'خم'), + (0xFC1C, 'M', 'سج'), + (0xFC1D, 'M', 'سح'), + (0xFC1E, 'M', 'سخ'), + (0xFC1F, 'M', 'سم'), + (0xFC20, 'M', 'صح'), + (0xFC21, 'M', 'صم'), + (0xFC22, 'M', 'ضج'), + (0xFC23, 'M', 'ضح'), + (0xFC24, 'M', 'ضخ'), + (0xFC25, 'M', 'ضم'), + (0xFC26, 'M', 'طح'), + (0xFC27, 'M', 'طم'), + (0xFC28, 'M', 'ظم'), + (0xFC29, 'M', 'عج'), + (0xFC2A, 'M', 'عم'), + (0xFC2B, 'M', 'غج'), + (0xFC2C, 'M', 'غم'), + (0xFC2D, 'M', 'فج'), + (0xFC2E, 'M', 'فح'), + (0xFC2F, 'M', 'فخ'), + (0xFC30, 'M', 'فم'), + (0xFC31, 'M', 'فى'), + (0xFC32, 'M', 'في'), + (0xFC33, 'M', 'قح'), + (0xFC34, 'M', 'قم'), + (0xFC35, 'M', 'قى'), + (0xFC36, 'M', 'قي'), + (0xFC37, 'M', 'كا'), + (0xFC38, 'M', 'كج'), + (0xFC39, 'M', 'كح'), + (0xFC3A, 'M', 'كخ'), + (0xFC3B, 'M', 'كل'), + (0xFC3C, 'M', 'كم'), + (0xFC3D, 'M', 'كى'), + (0xFC3E, 'M', 'كي'), + (0xFC3F, 'M', 'لج'), + (0xFC40, 'M', 'لح'), + (0xFC41, 'M', 'لخ'), + 
(0xFC42, 'M', 'لم'), + (0xFC43, 'M', 'لى'), + (0xFC44, 'M', 'لي'), + (0xFC45, 'M', 'مج'), + (0xFC46, 'M', 'مح'), + (0xFC47, 'M', 'مخ'), + (0xFC48, 'M', 'مم'), + (0xFC49, 'M', 'مى'), + (0xFC4A, 'M', 'مي'), + (0xFC4B, 'M', 'نج'), + (0xFC4C, 'M', 'نح'), + (0xFC4D, 'M', 'نخ'), + (0xFC4E, 'M', 'نم'), + (0xFC4F, 'M', 'نى'), + (0xFC50, 'M', 'ني'), + (0xFC51, 'M', 'هج'), + (0xFC52, 'M', 'هم'), + (0xFC53, 'M', 'هى'), + (0xFC54, 'M', 'هي'), + (0xFC55, 'M', 'يج'), + (0xFC56, 'M', 'يح'), + ] + +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC57, 'M', 'يخ'), + (0xFC58, 'M', 'يم'), + (0xFC59, 'M', 'يى'), + (0xFC5A, 'M', 'يي'), + (0xFC5B, 'M', 'ذٰ'), + (0xFC5C, 'M', 'رٰ'), + (0xFC5D, 'M', 'ىٰ'), + (0xFC5E, '3', ' ٌّ'), + (0xFC5F, '3', ' ٍّ'), + (0xFC60, '3', ' َّ'), + (0xFC61, '3', ' ُّ'), + (0xFC62, '3', ' ِّ'), + (0xFC63, '3', ' ّٰ'), + (0xFC64, 'M', 'ئر'), + (0xFC65, 'M', 'ئز'), + (0xFC66, 'M', 'ئم'), + (0xFC67, 'M', 'ئن'), + (0xFC68, 'M', 'ئى'), + (0xFC69, 'M', 'ئي'), + (0xFC6A, 'M', 'بر'), + (0xFC6B, 'M', 'بز'), + (0xFC6C, 'M', 'بم'), + (0xFC6D, 'M', 'بن'), + (0xFC6E, 'M', 'بى'), + (0xFC6F, 'M', 'بي'), + (0xFC70, 'M', 'تر'), + (0xFC71, 'M', 'تز'), + (0xFC72, 'M', 'تم'), + (0xFC73, 'M', 'تن'), + (0xFC74, 'M', 'تى'), + (0xFC75, 'M', 'تي'), + (0xFC76, 'M', 'ثر'), + (0xFC77, 'M', 'ثز'), + (0xFC78, 'M', 'ثم'), + (0xFC79, 'M', 'ثن'), + (0xFC7A, 'M', 'ثى'), + (0xFC7B, 'M', 'ثي'), + (0xFC7C, 'M', 'فى'), + (0xFC7D, 'M', 'في'), + (0xFC7E, 'M', 'قى'), + (0xFC7F, 'M', 'قي'), + (0xFC80, 'M', 'كا'), + (0xFC81, 'M', 'كل'), + (0xFC82, 'M', 'كم'), + (0xFC83, 'M', 'كى'), + (0xFC84, 'M', 'كي'), + (0xFC85, 'M', 'لم'), + (0xFC86, 'M', 'لى'), + (0xFC87, 'M', 'لي'), + (0xFC88, 'M', 'ما'), + (0xFC89, 'M', 'مم'), + (0xFC8A, 'M', 'نر'), + (0xFC8B, 'M', 'نز'), + (0xFC8C, 'M', 'نم'), + (0xFC8D, 'M', 'نن'), + (0xFC8E, 'M', 'نى'), + (0xFC8F, 'M', 'ني'), + (0xFC90, 'M', 'ىٰ'), + (0xFC91, 'M', 'ير'), + (0xFC92, 'M', 'يز'), + (0xFC93, 'M', 'يم'), + (0xFC94, 'M', 'ين'), + (0xFC95, 'M', 'يى'), + (0xFC96, 'M', 'يي'), + (0xFC97, 'M', 'ئج'), + (0xFC98, 'M', 'ئح'), + (0xFC99, 'M', 'ئخ'), + (0xFC9A, 'M', 'ئم'), + (0xFC9B, 'M', 'ئه'), + (0xFC9C, 'M', 'بج'), + (0xFC9D, 'M', 'بح'), + (0xFC9E, 'M', 'بخ'), + (0xFC9F, 'M', 'بم'), + (0xFCA0, 'M', 'به'), + (0xFCA1, 'M', 'تج'), + (0xFCA2, 'M', 'تح'), + (0xFCA3, 'M', 'تخ'), + (0xFCA4, 'M', 'تم'), + (0xFCA5, 'M', 'ته'), + (0xFCA6, 'M', 'ثم'), + (0xFCA7, 'M', 'جح'), + (0xFCA8, 'M', 'جم'), + (0xFCA9, 'M', 'حج'), + (0xFCAA, 'M', 'حم'), + (0xFCAB, 'M', 'خج'), + (0xFCAC, 'M', 'خم'), + (0xFCAD, 'M', 'سج'), + (0xFCAE, 'M', 'سح'), + (0xFCAF, 'M', 'سخ'), + (0xFCB0, 'M', 'سم'), + (0xFCB1, 'M', 'صح'), + (0xFCB2, 'M', 'صخ'), + (0xFCB3, 'M', 'صم'), + (0xFCB4, 'M', 'ضج'), + (0xFCB5, 'M', 'ضح'), + (0xFCB6, 'M', 'ضخ'), + (0xFCB7, 'M', 'ضم'), + (0xFCB8, 'M', 'طح'), + (0xFCB9, 'M', 'ظم'), + (0xFCBA, 'M', 'عج'), + ] + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFCBB, 'M', 'عم'), + (0xFCBC, 'M', 'غج'), + (0xFCBD, 'M', 'غم'), + (0xFCBE, 'M', 'فج'), + (0xFCBF, 'M', 'فح'), + (0xFCC0, 'M', 'فخ'), + (0xFCC1, 'M', 'فم'), + (0xFCC2, 'M', 'قح'), + (0xFCC3, 'M', 'قم'), + (0xFCC4, 'M', 'كج'), + (0xFCC5, 'M', 'كح'), + (0xFCC6, 'M', 'كخ'), + (0xFCC7, 'M', 'كل'), + (0xFCC8, 'M', 'كم'), + (0xFCC9, 'M', 'لج'), + (0xFCCA, 'M', 'لح'), + (0xFCCB, 'M', 'لخ'), + (0xFCCC, 'M', 'لم'), + (0xFCCD, 'M', 'له'), + (0xFCCE, 'M', 'مج'), + (0xFCCF, 'M', 'مح'), + (0xFCD0, 'M', 'مخ'), + (0xFCD1, 'M', 'مم'), + (0xFCD2, 'M', 'نج'), + (0xFCD3, 'M', 'نح'), + (0xFCD4, 'M', 
'نخ'), + (0xFCD5, 'M', 'نم'), + (0xFCD6, 'M', 'نه'), + (0xFCD7, 'M', 'هج'), + (0xFCD8, 'M', 'هم'), + (0xFCD9, 'M', 'هٰ'), + (0xFCDA, 'M', 'يج'), + (0xFCDB, 'M', 'يح'), + (0xFCDC, 'M', 'يخ'), + (0xFCDD, 'M', 'يم'), + (0xFCDE, 'M', 'يه'), + (0xFCDF, 'M', 'ئم'), + (0xFCE0, 'M', 'ئه'), + (0xFCE1, 'M', 'بم'), + (0xFCE2, 'M', 'به'), + (0xFCE3, 'M', 'تم'), + (0xFCE4, 'M', 'ته'), + (0xFCE5, 'M', 'ثم'), + (0xFCE6, 'M', 'ثه'), + (0xFCE7, 'M', 'سم'), + (0xFCE8, 'M', 'سه'), + (0xFCE9, 'M', 'شم'), + (0xFCEA, 'M', 'شه'), + (0xFCEB, 'M', 'كل'), + (0xFCEC, 'M', 'كم'), + (0xFCED, 'M', 'لم'), + (0xFCEE, 'M', 'نم'), + (0xFCEF, 'M', 'نه'), + (0xFCF0, 'M', 'يم'), + (0xFCF1, 'M', 'يه'), + (0xFCF2, 'M', 'ـَّ'), + (0xFCF3, 'M', 'ـُّ'), + (0xFCF4, 'M', 'ـِّ'), + (0xFCF5, 'M', 'طى'), + (0xFCF6, 'M', 'طي'), + (0xFCF7, 'M', 'عى'), + (0xFCF8, 'M', 'عي'), + (0xFCF9, 'M', 'غى'), + (0xFCFA, 'M', 'غي'), + (0xFCFB, 'M', 'سى'), + (0xFCFC, 'M', 'سي'), + (0xFCFD, 'M', 'شى'), + (0xFCFE, 'M', 'شي'), + (0xFCFF, 'M', 'حى'), + (0xFD00, 'M', 'حي'), + (0xFD01, 'M', 'جى'), + (0xFD02, 'M', 'جي'), + (0xFD03, 'M', 'خى'), + (0xFD04, 'M', 'خي'), + (0xFD05, 'M', 'صى'), + (0xFD06, 'M', 'صي'), + (0xFD07, 'M', 'ضى'), + (0xFD08, 'M', 'ضي'), + (0xFD09, 'M', 'شج'), + (0xFD0A, 'M', 'شح'), + (0xFD0B, 'M', 'شخ'), + (0xFD0C, 'M', 'شم'), + (0xFD0D, 'M', 'شر'), + (0xFD0E, 'M', 'سر'), + (0xFD0F, 'M', 'صر'), + (0xFD10, 'M', 'ضر'), + (0xFD11, 'M', 'طى'), + (0xFD12, 'M', 'طي'), + (0xFD13, 'M', 'عى'), + (0xFD14, 'M', 'عي'), + (0xFD15, 'M', 'غى'), + (0xFD16, 'M', 'غي'), + (0xFD17, 'M', 'سى'), + (0xFD18, 'M', 'سي'), + (0xFD19, 'M', 'شى'), + (0xFD1A, 'M', 'شي'), + (0xFD1B, 'M', 'حى'), + (0xFD1C, 'M', 'حي'), + (0xFD1D, 'M', 'جى'), + (0xFD1E, 'M', 'جي'), + ] + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFD1F, 'M', 'خى'), + (0xFD20, 'M', 'خي'), + (0xFD21, 'M', 'صى'), + (0xFD22, 'M', 'صي'), + (0xFD23, 'M', 'ضى'), + (0xFD24, 'M', 'ضي'), + (0xFD25, 'M', 'شج'), + (0xFD26, 'M', 'شح'), + (0xFD27, 'M', 'شخ'), + (0xFD28, 'M', 'شم'), + (0xFD29, 'M', 'شر'), + (0xFD2A, 'M', 'سر'), + (0xFD2B, 'M', 'صر'), + (0xFD2C, 'M', 'ضر'), + (0xFD2D, 'M', 'شج'), + (0xFD2E, 'M', 'شح'), + (0xFD2F, 'M', 'شخ'), + (0xFD30, 'M', 'شم'), + (0xFD31, 'M', 'سه'), + (0xFD32, 'M', 'شه'), + (0xFD33, 'M', 'طم'), + (0xFD34, 'M', 'سج'), + (0xFD35, 'M', 'سح'), + (0xFD36, 'M', 'سخ'), + (0xFD37, 'M', 'شج'), + (0xFD38, 'M', 'شح'), + (0xFD39, 'M', 'شخ'), + (0xFD3A, 'M', 'طم'), + (0xFD3B, 'M', 'ظم'), + (0xFD3C, 'M', 'اً'), + (0xFD3E, 'V'), + (0xFD50, 'M', 'تجم'), + (0xFD51, 'M', 'تحج'), + (0xFD53, 'M', 'تحم'), + (0xFD54, 'M', 'تخم'), + (0xFD55, 'M', 'تمج'), + (0xFD56, 'M', 'تمح'), + (0xFD57, 'M', 'تمخ'), + (0xFD58, 'M', 'جمح'), + (0xFD5A, 'M', 'حمي'), + (0xFD5B, 'M', 'حمى'), + (0xFD5C, 'M', 'سحج'), + (0xFD5D, 'M', 'سجح'), + (0xFD5E, 'M', 'سجى'), + (0xFD5F, 'M', 'سمح'), + (0xFD61, 'M', 'سمج'), + (0xFD62, 'M', 'سمم'), + (0xFD64, 'M', 'صحح'), + (0xFD66, 'M', 'صمم'), + (0xFD67, 'M', 'شحم'), + (0xFD69, 'M', 'شجي'), + (0xFD6A, 'M', 'شمخ'), + (0xFD6C, 'M', 'شمم'), + (0xFD6E, 'M', 'ضحى'), + (0xFD6F, 'M', 'ضخم'), + (0xFD71, 'M', 'طمح'), + (0xFD73, 'M', 'طمم'), + (0xFD74, 'M', 'طمي'), + (0xFD75, 'M', 'عجم'), + (0xFD76, 'M', 'عمم'), + (0xFD78, 'M', 'عمى'), + (0xFD79, 'M', 'غمم'), + (0xFD7A, 'M', 'غمي'), + (0xFD7B, 'M', 'غمى'), + (0xFD7C, 'M', 'فخم'), + (0xFD7E, 'M', 'قمح'), + (0xFD7F, 'M', 'قمم'), + (0xFD80, 'M', 'لحم'), + (0xFD81, 'M', 'لحي'), + (0xFD82, 'M', 'لحى'), + (0xFD83, 'M', 'لجج'), + (0xFD85, 'M', 'لخم'), + (0xFD87, 'M', 'لمح'), + (0xFD89, 'M', 'محج'), + (0xFD8A, 'M', 
'محم'), + (0xFD8B, 'M', 'محي'), + (0xFD8C, 'M', 'مجح'), + (0xFD8D, 'M', 'مجم'), + (0xFD8E, 'M', 'مخج'), + (0xFD8F, 'M', 'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', 'مجخ'), + (0xFD93, 'M', 'همج'), + (0xFD94, 'M', 'همم'), + (0xFD95, 'M', 'نحم'), + (0xFD96, 'M', 'نحى'), + (0xFD97, 'M', 'نجم'), + (0xFD99, 'M', 'نجى'), + (0xFD9A, 'M', 'نمي'), + (0xFD9B, 'M', 'نمى'), + (0xFD9C, 'M', 'يمم'), + (0xFD9E, 'M', 'بخي'), + (0xFD9F, 'M', 'تجي'), + (0xFDA0, 'M', 'تجى'), + (0xFDA1, 'M', 'تخي'), + (0xFDA2, 'M', 'تخى'), + (0xFDA3, 'M', 'تمي'), + (0xFDA4, 'M', 'تمى'), + (0xFDA5, 'M', 'جمي'), + (0xFDA6, 'M', 'جحى'), + ] + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFDA7, 'M', 'جمى'), + (0xFDA8, 'M', 'سخى'), + (0xFDA9, 'M', 'صحي'), + (0xFDAA, 'M', 'شحي'), + (0xFDAB, 'M', 'ضحي'), + (0xFDAC, 'M', 'لجي'), + (0xFDAD, 'M', 'لمي'), + (0xFDAE, 'M', 'يحي'), + (0xFDAF, 'M', 'يجي'), + (0xFDB0, 'M', 'يمي'), + (0xFDB1, 'M', 'ممي'), + (0xFDB2, 'M', 'قمي'), + (0xFDB3, 'M', 'نحي'), + (0xFDB4, 'M', 'قمح'), + (0xFDB5, 'M', 'لحم'), + (0xFDB6, 'M', 'عمي'), + (0xFDB7, 'M', 'كمي'), + (0xFDB8, 'M', 'نجح'), + (0xFDB9, 'M', 'مخي'), + (0xFDBA, 'M', 'لجم'), + (0xFDBB, 'M', 'كمم'), + (0xFDBC, 'M', 'لجم'), + (0xFDBD, 'M', 'نجح'), + (0xFDBE, 'M', 'جحي'), + (0xFDBF, 'M', 'حجي'), + (0xFDC0, 'M', 'مجي'), + (0xFDC1, 'M', 'فمي'), + (0xFDC2, 'M', 'بحي'), + (0xFDC3, 'M', 'كمم'), + (0xFDC4, 'M', 'عجم'), + (0xFDC5, 'M', 'صمم'), + (0xFDC6, 'M', 'سخي'), + (0xFDC7, 'M', 'نجي'), + (0xFDC8, 'X'), + (0xFDCF, 'V'), + (0xFDD0, 'X'), + (0xFDF0, 'M', 'صلے'), + (0xFDF1, 'M', 'قلے'), + (0xFDF2, 'M', 'الله'), + (0xFDF3, 'M', 'اكبر'), + (0xFDF4, 'M', 'محمد'), + (0xFDF5, 'M', 'صلعم'), + (0xFDF6, 'M', 'رسول'), + (0xFDF7, 'M', 'عليه'), + (0xFDF8, 'M', 'وسلم'), + (0xFDF9, 'M', 'صلى'), + (0xFDFA, '3', 'صلى الله عليه وسلم'), + (0xFDFB, '3', 'جل جلاله'), + (0xFDFC, 'M', 'ریال'), + (0xFDFD, 'V'), + (0xFE00, 'I'), + (0xFE10, '3', ','), + (0xFE11, 'M', '、'), + (0xFE12, 'X'), + (0xFE13, '3', ':'), + (0xFE14, '3', ';'), + (0xFE15, '3', '!'), + (0xFE16, '3', '?'), + (0xFE17, 'M', '〖'), + (0xFE18, 'M', '〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE30, 'X'), + (0xFE31, 'M', '—'), + (0xFE32, 'M', '–'), + (0xFE33, '3', '_'), + (0xFE35, '3', '('), + (0xFE36, '3', ')'), + (0xFE37, '3', '{'), + (0xFE38, '3', '}'), + (0xFE39, 'M', '〔'), + (0xFE3A, 'M', '〕'), + (0xFE3B, 'M', '【'), + (0xFE3C, 'M', '】'), + (0xFE3D, 'M', '《'), + (0xFE3E, 'M', '》'), + (0xFE3F, 'M', '〈'), + (0xFE40, 'M', '〉'), + (0xFE41, 'M', '「'), + (0xFE42, 'M', '」'), + (0xFE43, 'M', '『'), + (0xFE44, 'M', '』'), + (0xFE45, 'V'), + (0xFE47, '3', '['), + (0xFE48, '3', ']'), + (0xFE49, '3', ' ̅'), + (0xFE4D, '3', '_'), + (0xFE50, '3', ','), + (0xFE51, 'M', '、'), + (0xFE52, 'X'), + (0xFE54, '3', ';'), + (0xFE55, '3', ':'), + (0xFE56, '3', '?'), + (0xFE57, '3', '!'), + (0xFE58, 'M', '—'), + (0xFE59, '3', '('), + (0xFE5A, '3', ')'), + (0xFE5B, '3', '{'), + (0xFE5C, '3', '}'), + (0xFE5D, 'M', '〔'), + ] + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE5E, 'M', '〕'), + (0xFE5F, '3', '#'), + (0xFE60, '3', '&'), + (0xFE61, '3', '*'), + (0xFE62, '3', '+'), + (0xFE63, 'M', '-'), + (0xFE64, '3', '<'), + (0xFE65, '3', '>'), + (0xFE66, '3', '='), + (0xFE67, 'X'), + (0xFE68, '3', '\\'), + (0xFE69, '3', '$'), + (0xFE6A, '3', '%'), + (0xFE6B, '3', '@'), + (0xFE6C, 'X'), + (0xFE70, '3', ' ً'), + (0xFE71, 'M', 'ـً'), + (0xFE72, '3', ' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', ' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', ' َ'), + (0xFE77, 'M', 'ـَ'), + (0xFE78, '3', ' 
ُ'), + (0xFE79, 'M', 'ـُ'), + (0xFE7A, '3', ' ِ'), + (0xFE7B, 'M', 'ـِ'), + (0xFE7C, '3', ' ّ'), + (0xFE7D, 'M', 'ـّ'), + (0xFE7E, '3', ' ْ'), + (0xFE7F, 'M', 'ـْ'), + (0xFE80, 'M', 'ء'), + (0xFE81, 'M', 'آ'), + (0xFE83, 'M', 'أ'), + (0xFE85, 'M', 'ؤ'), + (0xFE87, 'M', 'إ'), + (0xFE89, 'M', 'ئ'), + (0xFE8D, 'M', 'ا'), + (0xFE8F, 'M', 'ب'), + (0xFE93, 'M', 'ة'), + (0xFE95, 'M', 'ت'), + (0xFE99, 'M', 'ث'), + (0xFE9D, 'M', 'ج'), + (0xFEA1, 'M', 'ح'), + (0xFEA5, 'M', 'خ'), + (0xFEA9, 'M', 'د'), + (0xFEAB, 'M', 'ذ'), + (0xFEAD, 'M', 'ر'), + (0xFEAF, 'M', 'ز'), + (0xFEB1, 'M', 'س'), + (0xFEB5, 'M', 'ش'), + (0xFEB9, 'M', 'ص'), + (0xFEBD, 'M', 'ض'), + (0xFEC1, 'M', 'ط'), + (0xFEC5, 'M', 'ظ'), + (0xFEC9, 'M', 'ع'), + (0xFECD, 'M', 'غ'), + (0xFED1, 'M', 'ف'), + (0xFED5, 'M', 'ق'), + (0xFED9, 'M', 'ك'), + (0xFEDD, 'M', 'ل'), + (0xFEE1, 'M', 'م'), + (0xFEE5, 'M', 'ن'), + (0xFEE9, 'M', 'ه'), + (0xFEED, 'M', 'و'), + (0xFEEF, 'M', 'ى'), + (0xFEF1, 'M', 'ي'), + (0xFEF5, 'M', 'لآ'), + (0xFEF7, 'M', 'لأ'), + (0xFEF9, 'M', 'لإ'), + (0xFEFB, 'M', 'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', '!'), + (0xFF02, '3', '"'), + (0xFF03, '3', '#'), + (0xFF04, '3', '$'), + (0xFF05, '3', '%'), + (0xFF06, '3', '&'), + (0xFF07, '3', '\''), + (0xFF08, '3', '('), + (0xFF09, '3', ')'), + (0xFF0A, '3', '*'), + (0xFF0B, '3', '+'), + (0xFF0C, '3', ','), + (0xFF0D, 'M', '-'), + (0xFF0E, 'M', '.'), + (0xFF0F, '3', '/'), + (0xFF10, 'M', '0'), + (0xFF11, 'M', '1'), + (0xFF12, 'M', '2'), + (0xFF13, 'M', '3'), + (0xFF14, 'M', '4'), + (0xFF15, 'M', '5'), + (0xFF16, 'M', '6'), + (0xFF17, 'M', '7'), + (0xFF18, 'M', '8'), + (0xFF19, 'M', '9'), + (0xFF1A, '3', ':'), + ] + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF1B, '3', ';'), + (0xFF1C, '3', '<'), + (0xFF1D, '3', '='), + (0xFF1E, '3', '>'), + (0xFF1F, '3', '?'), + (0xFF20, '3', '@'), + (0xFF21, 'M', 'a'), + (0xFF22, 'M', 'b'), + (0xFF23, 'M', 'c'), + (0xFF24, 'M', 'd'), + (0xFF25, 'M', 'e'), + (0xFF26, 'M', 'f'), + (0xFF27, 'M', 'g'), + (0xFF28, 'M', 'h'), + (0xFF29, 'M', 'i'), + (0xFF2A, 'M', 'j'), + (0xFF2B, 'M', 'k'), + (0xFF2C, 'M', 'l'), + (0xFF2D, 'M', 'm'), + (0xFF2E, 'M', 'n'), + (0xFF2F, 'M', 'o'), + (0xFF30, 'M', 'p'), + (0xFF31, 'M', 'q'), + (0xFF32, 'M', 'r'), + (0xFF33, 'M', 's'), + (0xFF34, 'M', 't'), + (0xFF35, 'M', 'u'), + (0xFF36, 'M', 'v'), + (0xFF37, 'M', 'w'), + (0xFF38, 'M', 'x'), + (0xFF39, 'M', 'y'), + (0xFF3A, 'M', 'z'), + (0xFF3B, '3', '['), + (0xFF3C, '3', '\\'), + (0xFF3D, '3', ']'), + (0xFF3E, '3', '^'), + (0xFF3F, '3', '_'), + (0xFF40, '3', '`'), + (0xFF41, 'M', 'a'), + (0xFF42, 'M', 'b'), + (0xFF43, 'M', 'c'), + (0xFF44, 'M', 'd'), + (0xFF45, 'M', 'e'), + (0xFF46, 'M', 'f'), + (0xFF47, 'M', 'g'), + (0xFF48, 'M', 'h'), + (0xFF49, 'M', 'i'), + (0xFF4A, 'M', 'j'), + (0xFF4B, 'M', 'k'), + (0xFF4C, 'M', 'l'), + (0xFF4D, 'M', 'm'), + (0xFF4E, 'M', 'n'), + (0xFF4F, 'M', 'o'), + (0xFF50, 'M', 'p'), + (0xFF51, 'M', 'q'), + (0xFF52, 'M', 'r'), + (0xFF53, 'M', 's'), + (0xFF54, 'M', 't'), + (0xFF55, 'M', 'u'), + (0xFF56, 'M', 'v'), + (0xFF57, 'M', 'w'), + (0xFF58, 'M', 'x'), + (0xFF59, 'M', 'y'), + (0xFF5A, 'M', 'z'), + (0xFF5B, '3', '{'), + (0xFF5C, '3', '|'), + (0xFF5D, '3', '}'), + (0xFF5E, '3', '~'), + (0xFF5F, 'M', '⦅'), + (0xFF60, 'M', '⦆'), + (0xFF61, 'M', '.'), + (0xFF62, 'M', '「'), + (0xFF63, 'M', '」'), + (0xFF64, 'M', '、'), + (0xFF65, 'M', '・'), + (0xFF66, 'M', 'ヲ'), + (0xFF67, 'M', 'ァ'), + (0xFF68, 'M', 'ィ'), + (0xFF69, 'M', 'ゥ'), + (0xFF6A, 'M', 'ェ'), + (0xFF6B, 'M', 'ォ'), + (0xFF6C, 
'M', 'ャ'), + (0xFF6D, 'M', 'ュ'), + (0xFF6E, 'M', 'ョ'), + (0xFF6F, 'M', 'ッ'), + (0xFF70, 'M', 'ー'), + (0xFF71, 'M', 'ア'), + (0xFF72, 'M', 'イ'), + (0xFF73, 'M', 'ウ'), + (0xFF74, 'M', 'エ'), + (0xFF75, 'M', 'オ'), + (0xFF76, 'M', 'カ'), + (0xFF77, 'M', 'キ'), + (0xFF78, 'M', 'ク'), + (0xFF79, 'M', 'ケ'), + (0xFF7A, 'M', 'コ'), + (0xFF7B, 'M', 'サ'), + (0xFF7C, 'M', 'シ'), + (0xFF7D, 'M', 'ス'), + (0xFF7E, 'M', 'セ'), + ] + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF7F, 'M', 'ソ'), + (0xFF80, 'M', 'タ'), + (0xFF81, 'M', 'チ'), + (0xFF82, 'M', 'ツ'), + (0xFF83, 'M', 'テ'), + (0xFF84, 'M', 'ト'), + (0xFF85, 'M', 'ナ'), + (0xFF86, 'M', 'ニ'), + (0xFF87, 'M', 'ヌ'), + (0xFF88, 'M', 'ネ'), + (0xFF89, 'M', 'ノ'), + (0xFF8A, 'M', 'ハ'), + (0xFF8B, 'M', 'ヒ'), + (0xFF8C, 'M', 'フ'), + (0xFF8D, 'M', 'ヘ'), + (0xFF8E, 'M', 'ホ'), + (0xFF8F, 'M', 'マ'), + (0xFF90, 'M', 'ミ'), + (0xFF91, 'M', 'ム'), + (0xFF92, 'M', 'メ'), + (0xFF93, 'M', 'モ'), + (0xFF94, 'M', 'ヤ'), + (0xFF95, 'M', 'ユ'), + (0xFF96, 'M', 'ヨ'), + (0xFF97, 'M', 'ラ'), + (0xFF98, 'M', 'リ'), + (0xFF99, 'M', 'ル'), + (0xFF9A, 'M', 'レ'), + (0xFF9B, 'M', 'ロ'), + (0xFF9C, 'M', 'ワ'), + (0xFF9D, 'M', 'ン'), + (0xFF9E, 'M', '゙'), + (0xFF9F, 'M', '゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', 'ᄀ'), + (0xFFA2, 'M', 'ᄁ'), + (0xFFA3, 'M', 'ᆪ'), + (0xFFA4, 'M', 'ᄂ'), + (0xFFA5, 'M', 'ᆬ'), + (0xFFA6, 'M', 'ᆭ'), + (0xFFA7, 'M', 'ᄃ'), + (0xFFA8, 'M', 'ᄄ'), + (0xFFA9, 'M', 'ᄅ'), + (0xFFAA, 'M', 'ᆰ'), + (0xFFAB, 'M', 'ᆱ'), + (0xFFAC, 'M', 'ᆲ'), + (0xFFAD, 'M', 'ᆳ'), + (0xFFAE, 'M', 'ᆴ'), + (0xFFAF, 'M', 'ᆵ'), + (0xFFB0, 'M', 'ᄚ'), + (0xFFB1, 'M', 'ᄆ'), + (0xFFB2, 'M', 'ᄇ'), + (0xFFB3, 'M', 'ᄈ'), + (0xFFB4, 'M', 'ᄡ'), + (0xFFB5, 'M', 'ᄉ'), + (0xFFB6, 'M', 'ᄊ'), + (0xFFB7, 'M', 'ᄋ'), + (0xFFB8, 'M', 'ᄌ'), + (0xFFB9, 'M', 'ᄍ'), + (0xFFBA, 'M', 'ᄎ'), + (0xFFBB, 'M', 'ᄏ'), + (0xFFBC, 'M', 'ᄐ'), + (0xFFBD, 'M', 'ᄑ'), + (0xFFBE, 'M', 'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', 'ᅡ'), + (0xFFC3, 'M', 'ᅢ'), + (0xFFC4, 'M', 'ᅣ'), + (0xFFC5, 'M', 'ᅤ'), + (0xFFC6, 'M', 'ᅥ'), + (0xFFC7, 'M', 'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', 'ᅧ'), + (0xFFCB, 'M', 'ᅨ'), + (0xFFCC, 'M', 'ᅩ'), + (0xFFCD, 'M', 'ᅪ'), + (0xFFCE, 'M', 'ᅫ'), + (0xFFCF, 'M', 'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', 'ᅭ'), + (0xFFD3, 'M', 'ᅮ'), + (0xFFD4, 'M', 'ᅯ'), + (0xFFD5, 'M', 'ᅰ'), + (0xFFD6, 'M', 'ᅱ'), + (0xFFD7, 'M', 'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', 'ᅳ'), + (0xFFDB, 'M', 'ᅴ'), + (0xFFDC, 'M', 'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', '¢'), + (0xFFE1, 'M', '£'), + (0xFFE2, 'M', '¬'), + (0xFFE3, '3', ' ̄'), + (0xFFE4, 'M', '¦'), + (0xFFE5, 'M', '¥'), + (0xFFE6, 'M', '₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', '│'), + (0xFFE9, 'M', '←'), + ] + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFEA, 'M', '↑'), + (0xFFEB, 'M', '→'), + (0xFFEC, 'M', '↓'), + (0xFFED, 'M', '■'), + (0xFFEE, 'M', '○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018F, 'X'), + (0x10190, 'V'), + (0x1019D, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), + (0x10300, 'V'), + (0x10324, 'X'), + (0x1032D, 'V'), + (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 
'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', '𐐨'), + (0x10401, 'M', '𐐩'), + (0x10402, 'M', '𐐪'), + (0x10403, 'M', '𐐫'), + (0x10404, 'M', '𐐬'), + (0x10405, 'M', '𐐭'), + (0x10406, 'M', '𐐮'), + (0x10407, 'M', '𐐯'), + (0x10408, 'M', '𐐰'), + (0x10409, 'M', '𐐱'), + (0x1040A, 'M', '𐐲'), + (0x1040B, 'M', '𐐳'), + (0x1040C, 'M', '𐐴'), + (0x1040D, 'M', '𐐵'), + (0x1040E, 'M', '𐐶'), + (0x1040F, 'M', '𐐷'), + (0x10410, 'M', '𐐸'), + (0x10411, 'M', '𐐹'), + (0x10412, 'M', '𐐺'), + (0x10413, 'M', '𐐻'), + (0x10414, 'M', '𐐼'), + (0x10415, 'M', '𐐽'), + (0x10416, 'M', '𐐾'), + (0x10417, 'M', '𐐿'), + (0x10418, 'M', '𐑀'), + (0x10419, 'M', '𐑁'), + (0x1041A, 'M', '𐑂'), + (0x1041B, 'M', '𐑃'), + (0x1041C, 'M', '𐑄'), + (0x1041D, 'M', '𐑅'), + (0x1041E, 'M', '𐑆'), + (0x1041F, 'M', '𐑇'), + (0x10420, 'M', '𐑈'), + (0x10421, 'M', '𐑉'), + (0x10422, 'M', '𐑊'), + (0x10423, 'M', '𐑋'), + (0x10424, 'M', '𐑌'), + (0x10425, 'M', '𐑍'), + (0x10426, 'M', '𐑎'), + (0x10427, 'M', '𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x104B0, 'M', '𐓘'), + (0x104B1, 'M', '𐓙'), + (0x104B2, 'M', '𐓚'), + (0x104B3, 'M', '𐓛'), + (0x104B4, 'M', '𐓜'), + (0x104B5, 'M', '𐓝'), + ] + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x104B6, 'M', '𐓞'), + (0x104B7, 'M', '𐓟'), + (0x104B8, 'M', '𐓠'), + (0x104B9, 'M', '𐓡'), + (0x104BA, 'M', '𐓢'), + (0x104BB, 'M', '𐓣'), + (0x104BC, 'M', '𐓤'), + (0x104BD, 'M', '𐓥'), + (0x104BE, 'M', '𐓦'), + (0x104BF, 'M', '𐓧'), + (0x104C0, 'M', '𐓨'), + (0x104C1, 'M', '𐓩'), + (0x104C2, 'M', '𐓪'), + (0x104C3, 'M', '𐓫'), + (0x104C4, 'M', '𐓬'), + (0x104C5, 'M', '𐓭'), + (0x104C6, 'M', '𐓮'), + (0x104C7, 'M', '𐓯'), + (0x104C8, 'M', '𐓰'), + (0x104C9, 'M', '𐓱'), + (0x104CA, 'M', '𐓲'), + (0x104CB, 'M', '𐓳'), + (0x104CC, 'M', '𐓴'), + (0x104CD, 'M', '𐓵'), + (0x104CE, 'M', '𐓶'), + (0x104CF, 'M', '𐓷'), + (0x104D0, 'M', '𐓸'), + (0x104D1, 'M', '𐓹'), + (0x104D2, 'M', '𐓺'), + (0x104D3, 'M', '𐓻'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'M', '𐖗'), + (0x10571, 'M', '𐖘'), + (0x10572, 'M', '𐖙'), + (0x10573, 'M', '𐖚'), + (0x10574, 'M', '𐖛'), + (0x10575, 'M', '𐖜'), + (0x10576, 'M', '𐖝'), + (0x10577, 'M', '𐖞'), + (0x10578, 'M', '𐖟'), + (0x10579, 'M', '𐖠'), + (0x1057A, 'M', '𐖡'), + (0x1057B, 'X'), + (0x1057C, 'M', '𐖣'), + (0x1057D, 'M', '𐖤'), + (0x1057E, 'M', '𐖥'), + (0x1057F, 'M', '𐖦'), + (0x10580, 'M', '𐖧'), + (0x10581, 'M', '𐖨'), + (0x10582, 'M', '𐖩'), + (0x10583, 'M', '𐖪'), + (0x10584, 'M', '𐖫'), + (0x10585, 'M', '𐖬'), + (0x10586, 'M', '𐖭'), + (0x10587, 'M', '𐖮'), + (0x10588, 'M', '𐖯'), + (0x10589, 'M', '𐖰'), + (0x1058A, 'M', '𐖱'), + (0x1058B, 'X'), + (0x1058C, 'M', '𐖳'), + (0x1058D, 'M', '𐖴'), + (0x1058E, 'M', '𐖵'), + (0x1058F, 'M', '𐖶'), + (0x10590, 'M', '𐖷'), + (0x10591, 'M', '𐖸'), + (0x10592, 'M', '𐖹'), + (0x10593, 'X'), + (0x10594, 'M', '𐖻'), + (0x10595, 'M', '𐖼'), + (0x10596, 'X'), + (0x10597, 'V'), + (0x105A2, 'X'), + (0x105A3, 'V'), + (0x105B2, 'X'), + (0x105B3, 'V'), + (0x105BA, 'X'), + (0x105BB, 'V'), + (0x105BD, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), + (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), + (0x10780, 'V'), + (0x10781, 'M', 'ː'), + (0x10782, 'M', 'ˑ'), + (0x10783, 'M', 'æ'), + (0x10784, 'M', 'ʙ'), + (0x10785, 'M', 'ɓ'), + (0x10786, 'X'), + (0x10787, 'M', 'ʣ'), + (0x10788, 'M', 'ꭦ'), + ] + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, 
str, str]]]: + return [ + (0x10789, 'M', 'ʥ'), + (0x1078A, 'M', 'ʤ'), + (0x1078B, 'M', 'ɖ'), + (0x1078C, 'M', 'ɗ'), + (0x1078D, 'M', 'ᶑ'), + (0x1078E, 'M', 'ɘ'), + (0x1078F, 'M', 'ɞ'), + (0x10790, 'M', 'ʩ'), + (0x10791, 'M', 'ɤ'), + (0x10792, 'M', 'ɢ'), + (0x10793, 'M', 'ɠ'), + (0x10794, 'M', 'ʛ'), + (0x10795, 'M', 'ħ'), + (0x10796, 'M', 'ʜ'), + (0x10797, 'M', 'ɧ'), + (0x10798, 'M', 'ʄ'), + (0x10799, 'M', 'ʪ'), + (0x1079A, 'M', 'ʫ'), + (0x1079B, 'M', 'ɬ'), + (0x1079C, 'M', '𝼄'), + (0x1079D, 'M', 'ꞎ'), + (0x1079E, 'M', 'ɮ'), + (0x1079F, 'M', '𝼅'), + (0x107A0, 'M', 'ʎ'), + (0x107A1, 'M', '𝼆'), + (0x107A2, 'M', 'ø'), + (0x107A3, 'M', 'ɶ'), + (0x107A4, 'M', 'ɷ'), + (0x107A5, 'M', 'q'), + (0x107A6, 'M', 'ɺ'), + (0x107A7, 'M', '𝼈'), + (0x107A8, 'M', 'ɽ'), + (0x107A9, 'M', 'ɾ'), + (0x107AA, 'M', 'ʀ'), + (0x107AB, 'M', 'ʨ'), + (0x107AC, 'M', 'ʦ'), + (0x107AD, 'M', 'ꭧ'), + (0x107AE, 'M', 'ʧ'), + (0x107AF, 'M', 'ʈ'), + (0x107B0, 'M', 'ⱱ'), + (0x107B1, 'X'), + (0x107B2, 'M', 'ʏ'), + (0x107B3, 'M', 'ʡ'), + (0x107B4, 'M', 'ʢ'), + (0x107B5, 'M', 'ʘ'), + (0x107B6, 'M', 'ǀ'), + (0x107B7, 'M', 'ǁ'), + (0x107B8, 'M', 'ǂ'), + (0x107B9, 'M', '𝼊'), + (0x107BA, 'M', '𝼞'), + (0x107BB, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A36, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A49, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + ] + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10C80, 'M', '𐳀'), + (0x10C81, 'M', '𐳁'), + (0x10C82, 'M', '𐳂'), + (0x10C83, 'M', '𐳃'), + (0x10C84, 'M', '𐳄'), + (0x10C85, 'M', '𐳅'), + (0x10C86, 'M', '𐳆'), + (0x10C87, 'M', '𐳇'), + (0x10C88, 'M', '𐳈'), + (0x10C89, 'M', '𐳉'), + (0x10C8A, 'M', '𐳊'), + (0x10C8B, 'M', '𐳋'), + (0x10C8C, 'M', '𐳌'), + (0x10C8D, 'M', '𐳍'), + (0x10C8E, 'M', '𐳎'), + (0x10C8F, 'M', '𐳏'), + (0x10C90, 'M', '𐳐'), + (0x10C91, 'M', '𐳑'), + (0x10C92, 'M', '𐳒'), + (0x10C93, 'M', '𐳓'), + (0x10C94, 'M', '𐳔'), + (0x10C95, 'M', '𐳕'), + (0x10C96, 'M', '𐳖'), + (0x10C97, 'M', '𐳗'), + (0x10C98, 'M', '𐳘'), + (0x10C99, 'M', '𐳙'), + (0x10C9A, 'M', '𐳚'), + (0x10C9B, 'M', '𐳛'), + (0x10C9C, 'M', '𐳜'), + (0x10C9D, 'M', '𐳝'), + (0x10C9E, 'M', '𐳞'), + (0x10C9F, 'M', '𐳟'), + (0x10CA0, 'M', '𐳠'), + (0x10CA1, 'M', '𐳡'), + (0x10CA2, 'M', '𐳢'), + (0x10CA3, 'M', '𐳣'), + (0x10CA4, 'M', '𐳤'), + (0x10CA5, 'M', '𐳥'), + (0x10CA6, 'M', '𐳦'), + (0x10CA7, 'M', '𐳧'), + (0x10CA8, 'M', '𐳨'), + (0x10CA9, 'M', '𐳩'), + (0x10CAA, 'M', '𐳪'), + (0x10CAB, 'M', '𐳫'), + (0x10CAC, 'M', '𐳬'), + (0x10CAD, 'M', '𐳭'), + (0x10CAE, 'M', '𐳮'), + (0x10CAF, 
'M', '𐳯'), + (0x10CB0, 'M', '𐳰'), + (0x10CB1, 'M', '𐳱'), + (0x10CB2, 'M', '𐳲'), + (0x10CB3, 'X'), + (0x10CC0, 'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D28, 'X'), + (0x10D30, 'V'), + (0x10D3A, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), + (0x10F00, 'V'), + (0x10F28, 'X'), + (0x10F30, 'V'), + (0x10F5A, 'X'), + (0x10F70, 'V'), + (0x10F8A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), + (0x10FE0, 'V'), + (0x10FF7, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11076, 'X'), + (0x1107F, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + ] + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x110C3, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11148, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), + (0x11180, 'V'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + (0x11213, 'V'), + (0x1123F, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133B, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + (0x11400, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + (0x11462, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), + (0x11680, 'V'), + (0x116BA, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171B, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11747, 'X'), + (0x11800, 'V'), + (0x1183C, 'X'), + (0x118A0, 'M', '𑣀'), + (0x118A1, 'M', '𑣁'), + (0x118A2, 'M', '𑣂'), + (0x118A3, 'M', '𑣃'), + (0x118A4, 'M', '𑣄'), + (0x118A5, 'M', '𑣅'), + (0x118A6, 'M', '𑣆'), + ] + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118A7, 'M', '𑣇'), + (0x118A8, 'M', '𑣈'), + (0x118A9, 'M', '𑣉'), + (0x118AA, 'M', '𑣊'), + (0x118AB, 'M', '𑣋'), + (0x118AC, 'M', '𑣌'), + (0x118AD, 'M', '𑣍'), + (0x118AE, 'M', '𑣎'), + (0x118AF, 'M', '𑣏'), + (0x118B0, 'M', '𑣐'), + (0x118B1, 'M', '𑣑'), + (0x118B2, 'M', '𑣒'), + (0x118B3, 'M', '𑣓'), + (0x118B4, 'M', '𑣔'), + (0x118B5, 'M', '𑣕'), + (0x118B6, 'M', '𑣖'), + (0x118B7, 'M', '𑣗'), + (0x118B8, 'M', '𑣘'), + (0x118B9, 'M', '𑣙'), + (0x118BA, 'M', '𑣚'), + (0x118BB, 'M', '𑣛'), + (0x118BC, 'M', '𑣜'), + (0x118BD, 'M', '𑣝'), + (0x118BE, 'M', '𑣞'), + (0x118BF, 'M', '𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + 
(0x1195A, 'X'), + (0x119A0, 'V'), + (0x119A8, 'X'), + (0x119AA, 'V'), + (0x119D8, 'X'), + (0x119DA, 'V'), + (0x119E5, 'X'), + (0x11A00, 'V'), + (0x11A48, 'X'), + (0x11A50, 'V'), + (0x11AA3, 'X'), + (0x11AB0, 'V'), + (0x11AF9, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), + (0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), + (0x11D3B, 'X'), + (0x11D3C, 'V'), + (0x11D3E, 'X'), + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), + (0x11D60, 'V'), + (0x11D66, 'X'), + (0x11D67, 'V'), + (0x11D69, 'X'), + (0x11D6A, 'V'), + (0x11D8F, 'X'), + (0x11D90, 'V'), + (0x11D92, 'X'), + (0x11D93, 'V'), + (0x11D99, 'X'), + (0x11DA0, 'V'), + (0x11DAA, 'X'), + (0x11EE0, 'V'), + (0x11EF9, 'X'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), + ] + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11FF2, 'X'), + (0x11FFF, 'V'), + (0x1239A, 'X'), + (0x12400, 'V'), + (0x1246F, 'X'), + (0x12470, 'V'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), + (0x12F90, 'V'), + (0x12FF3, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + (0x14400, 'V'), + (0x14647, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16ABF, 'X'), + (0x16AC0, 'V'), + (0x16ACA, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), + (0x16E40, 'M', '𖹠'), + (0x16E41, 'M', '𖹡'), + (0x16E42, 'M', '𖹢'), + (0x16E43, 'M', '𖹣'), + (0x16E44, 'M', '𖹤'), + (0x16E45, 'M', '𖹥'), + (0x16E46, 'M', '𖹦'), + (0x16E47, 'M', '𖹧'), + (0x16E48, 'M', '𖹨'), + (0x16E49, 'M', '𖹩'), + (0x16E4A, 'M', '𖹪'), + (0x16E4B, 'M', '𖹫'), + (0x16E4C, 'M', '𖹬'), + (0x16E4D, 'M', '𖹭'), + (0x16E4E, 'M', '𖹮'), + (0x16E4F, 'M', '𖹯'), + (0x16E50, 'M', '𖹰'), + (0x16E51, 'M', '𖹱'), + (0x16E52, 'M', '𖹲'), + (0x16E53, 'M', '𖹳'), + (0x16E54, 'M', '𖹴'), + (0x16E55, 'M', '𖹵'), + (0x16E56, 'M', '𖹶'), + (0x16E57, 'M', '𖹷'), + (0x16E58, 'M', '𖹸'), + (0x16E59, 'M', '𖹹'), + (0x16E5A, 'M', '𖹺'), + (0x16E5B, 'M', '𖹻'), + (0x16E5C, 'M', '𖹼'), + (0x16E5D, 'M', '𖹽'), + (0x16E5E, 'M', '𖹾'), + (0x16E5F, 'M', '𖹿'), + (0x16E60, 'V'), + (0x16E9B, 'X'), + (0x16F00, 'V'), + (0x16F4B, 'X'), + (0x16F4F, 'V'), + (0x16F88, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), + (0x17000, 'V'), + (0x187F8, 'X'), + (0x18800, 'V'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), + (0x1AFF0, 'V'), + (0x1AFF4, 'X'), + (0x1AFF5, 'V'), + (0x1AFFC, 'X'), + (0x1AFFD, 'V'), + (0x1AFFF, 'X'), + (0x1B000, 'V'), + (0x1B123, 'X'), + (0x1B150, 'V'), + (0x1B153, 'X'), + (0x1B164, 'V'), + ] + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1B168, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), + (0x1CF00, 'V'), + (0x1CF2E, 'X'), + (0x1CF30, 'V'), + (0x1CF47, 'X'), + (0x1CF50, 'V'), + (0x1CFC4, 'X'), + (0x1D000, 'V'), 
+ (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', '𝅗𝅥'), + (0x1D15F, 'M', '𝅘𝅥'), + (0x1D160, 'M', '𝅘𝅥𝅮'), + (0x1D161, 'M', '𝅘𝅥𝅯'), + (0x1D162, 'M', '𝅘𝅥𝅰'), + (0x1D163, 'M', '𝅘𝅥𝅱'), + (0x1D164, 'M', '𝅘𝅥𝅲'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', '𝆹𝅥'), + (0x1D1BC, 'M', '𝆺𝅥'), + (0x1D1BD, 'M', '𝆹𝅥𝅮'), + (0x1D1BE, 'M', '𝆺𝅥𝅮'), + (0x1D1BF, 'M', '𝆹𝅥𝅯'), + (0x1D1C0, 'M', '𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1EB, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D2E0, 'V'), + (0x1D2F4, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D379, 'X'), + (0x1D400, 'M', 'a'), + (0x1D401, 'M', 'b'), + (0x1D402, 'M', 'c'), + (0x1D403, 'M', 'd'), + (0x1D404, 'M', 'e'), + (0x1D405, 'M', 'f'), + (0x1D406, 'M', 'g'), + (0x1D407, 'M', 'h'), + (0x1D408, 'M', 'i'), + (0x1D409, 'M', 'j'), + (0x1D40A, 'M', 'k'), + (0x1D40B, 'M', 'l'), + (0x1D40C, 'M', 'm'), + (0x1D40D, 'M', 'n'), + (0x1D40E, 'M', 'o'), + (0x1D40F, 'M', 'p'), + (0x1D410, 'M', 'q'), + (0x1D411, 'M', 'r'), + (0x1D412, 'M', 's'), + (0x1D413, 'M', 't'), + (0x1D414, 'M', 'u'), + (0x1D415, 'M', 'v'), + (0x1D416, 'M', 'w'), + (0x1D417, 'M', 'x'), + (0x1D418, 'M', 'y'), + (0x1D419, 'M', 'z'), + (0x1D41A, 'M', 'a'), + (0x1D41B, 'M', 'b'), + (0x1D41C, 'M', 'c'), + (0x1D41D, 'M', 'd'), + (0x1D41E, 'M', 'e'), + (0x1D41F, 'M', 'f'), + (0x1D420, 'M', 'g'), + (0x1D421, 'M', 'h'), + (0x1D422, 'M', 'i'), + (0x1D423, 'M', 'j'), + (0x1D424, 'M', 'k'), + (0x1D425, 'M', 'l'), + (0x1D426, 'M', 'm'), + (0x1D427, 'M', 'n'), + (0x1D428, 'M', 'o'), + (0x1D429, 'M', 'p'), + (0x1D42A, 'M', 'q'), + (0x1D42B, 'M', 'r'), + (0x1D42C, 'M', 's'), + (0x1D42D, 'M', 't'), + (0x1D42E, 'M', 'u'), + (0x1D42F, 'M', 'v'), + (0x1D430, 'M', 'w'), + ] + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D431, 'M', 'x'), + (0x1D432, 'M', 'y'), + (0x1D433, 'M', 'z'), + (0x1D434, 'M', 'a'), + (0x1D435, 'M', 'b'), + (0x1D436, 'M', 'c'), + (0x1D437, 'M', 'd'), + (0x1D438, 'M', 'e'), + (0x1D439, 'M', 'f'), + (0x1D43A, 'M', 'g'), + (0x1D43B, 'M', 'h'), + (0x1D43C, 'M', 'i'), + (0x1D43D, 'M', 'j'), + (0x1D43E, 'M', 'k'), + (0x1D43F, 'M', 'l'), + (0x1D440, 'M', 'm'), + (0x1D441, 'M', 'n'), + (0x1D442, 'M', 'o'), + (0x1D443, 'M', 'p'), + (0x1D444, 'M', 'q'), + (0x1D445, 'M', 'r'), + (0x1D446, 'M', 's'), + (0x1D447, 'M', 't'), + (0x1D448, 'M', 'u'), + (0x1D449, 'M', 'v'), + (0x1D44A, 'M', 'w'), + (0x1D44B, 'M', 'x'), + (0x1D44C, 'M', 'y'), + (0x1D44D, 'M', 'z'), + (0x1D44E, 'M', 'a'), + (0x1D44F, 'M', 'b'), + (0x1D450, 'M', 'c'), + (0x1D451, 'M', 'd'), + (0x1D452, 'M', 'e'), + (0x1D453, 'M', 'f'), + (0x1D454, 'M', 'g'), + (0x1D455, 'X'), + (0x1D456, 'M', 'i'), + (0x1D457, 'M', 'j'), + (0x1D458, 'M', 'k'), + (0x1D459, 'M', 'l'), + (0x1D45A, 'M', 'm'), + (0x1D45B, 'M', 'n'), + (0x1D45C, 'M', 'o'), + (0x1D45D, 'M', 'p'), + (0x1D45E, 'M', 'q'), + (0x1D45F, 'M', 'r'), + (0x1D460, 'M', 's'), + (0x1D461, 'M', 't'), + (0x1D462, 'M', 'u'), + (0x1D463, 'M', 'v'), + (0x1D464, 'M', 'w'), + (0x1D465, 'M', 'x'), + (0x1D466, 'M', 'y'), + (0x1D467, 'M', 'z'), + (0x1D468, 'M', 'a'), + (0x1D469, 'M', 'b'), + (0x1D46A, 'M', 'c'), + (0x1D46B, 'M', 'd'), + (0x1D46C, 'M', 'e'), + (0x1D46D, 'M', 'f'), + (0x1D46E, 'M', 'g'), + (0x1D46F, 'M', 'h'), + (0x1D470, 'M', 'i'), + (0x1D471, 'M', 'j'), + (0x1D472, 'M', 'k'), + (0x1D473, 'M', 'l'), + (0x1D474, 'M', 'm'), + (0x1D475, 'M', 'n'), + (0x1D476, 'M', 'o'), + (0x1D477, 'M', 'p'), + (0x1D478, 'M', 'q'), + (0x1D479, 'M', 'r'), + (0x1D47A, 'M', 's'), + (0x1D47B, 'M', 
't'), + (0x1D47C, 'M', 'u'), + (0x1D47D, 'M', 'v'), + (0x1D47E, 'M', 'w'), + (0x1D47F, 'M', 'x'), + (0x1D480, 'M', 'y'), + (0x1D481, 'M', 'z'), + (0x1D482, 'M', 'a'), + (0x1D483, 'M', 'b'), + (0x1D484, 'M', 'c'), + (0x1D485, 'M', 'd'), + (0x1D486, 'M', 'e'), + (0x1D487, 'M', 'f'), + (0x1D488, 'M', 'g'), + (0x1D489, 'M', 'h'), + (0x1D48A, 'M', 'i'), + (0x1D48B, 'M', 'j'), + (0x1D48C, 'M', 'k'), + (0x1D48D, 'M', 'l'), + (0x1D48E, 'M', 'm'), + (0x1D48F, 'M', 'n'), + (0x1D490, 'M', 'o'), + (0x1D491, 'M', 'p'), + (0x1D492, 'M', 'q'), + (0x1D493, 'M', 'r'), + (0x1D494, 'M', 's'), + ] + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D495, 'M', 't'), + (0x1D496, 'M', 'u'), + (0x1D497, 'M', 'v'), + (0x1D498, 'M', 'w'), + (0x1D499, 'M', 'x'), + (0x1D49A, 'M', 'y'), + (0x1D49B, 'M', 'z'), + (0x1D49C, 'M', 'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', 'c'), + (0x1D49F, 'M', 'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', 'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', 'j'), + (0x1D4A6, 'M', 'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', 'n'), + (0x1D4AA, 'M', 'o'), + (0x1D4AB, 'M', 'p'), + (0x1D4AC, 'M', 'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', 's'), + (0x1D4AF, 'M', 't'), + (0x1D4B0, 'M', 'u'), + (0x1D4B1, 'M', 'v'), + (0x1D4B2, 'M', 'w'), + (0x1D4B3, 'M', 'x'), + (0x1D4B4, 'M', 'y'), + (0x1D4B5, 'M', 'z'), + (0x1D4B6, 'M', 'a'), + (0x1D4B7, 'M', 'b'), + (0x1D4B8, 'M', 'c'), + (0x1D4B9, 'M', 'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', 'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', 'h'), + (0x1D4BE, 'M', 'i'), + (0x1D4BF, 'M', 'j'), + (0x1D4C0, 'M', 'k'), + (0x1D4C1, 'M', 'l'), + (0x1D4C2, 'M', 'm'), + (0x1D4C3, 'M', 'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', 'p'), + (0x1D4C6, 'M', 'q'), + (0x1D4C7, 'M', 'r'), + (0x1D4C8, 'M', 's'), + (0x1D4C9, 'M', 't'), + (0x1D4CA, 'M', 'u'), + (0x1D4CB, 'M', 'v'), + (0x1D4CC, 'M', 'w'), + (0x1D4CD, 'M', 'x'), + (0x1D4CE, 'M', 'y'), + (0x1D4CF, 'M', 'z'), + (0x1D4D0, 'M', 'a'), + (0x1D4D1, 'M', 'b'), + (0x1D4D2, 'M', 'c'), + (0x1D4D3, 'M', 'd'), + (0x1D4D4, 'M', 'e'), + (0x1D4D5, 'M', 'f'), + (0x1D4D6, 'M', 'g'), + (0x1D4D7, 'M', 'h'), + (0x1D4D8, 'M', 'i'), + (0x1D4D9, 'M', 'j'), + (0x1D4DA, 'M', 'k'), + (0x1D4DB, 'M', 'l'), + (0x1D4DC, 'M', 'm'), + (0x1D4DD, 'M', 'n'), + (0x1D4DE, 'M', 'o'), + (0x1D4DF, 'M', 'p'), + (0x1D4E0, 'M', 'q'), + (0x1D4E1, 'M', 'r'), + (0x1D4E2, 'M', 's'), + (0x1D4E3, 'M', 't'), + (0x1D4E4, 'M', 'u'), + (0x1D4E5, 'M', 'v'), + (0x1D4E6, 'M', 'w'), + (0x1D4E7, 'M', 'x'), + (0x1D4E8, 'M', 'y'), + (0x1D4E9, 'M', 'z'), + (0x1D4EA, 'M', 'a'), + (0x1D4EB, 'M', 'b'), + (0x1D4EC, 'M', 'c'), + (0x1D4ED, 'M', 'd'), + (0x1D4EE, 'M', 'e'), + (0x1D4EF, 'M', 'f'), + (0x1D4F0, 'M', 'g'), + (0x1D4F1, 'M', 'h'), + (0x1D4F2, 'M', 'i'), + (0x1D4F3, 'M', 'j'), + (0x1D4F4, 'M', 'k'), + (0x1D4F5, 'M', 'l'), + (0x1D4F6, 'M', 'm'), + (0x1D4F7, 'M', 'n'), + (0x1D4F8, 'M', 'o'), + (0x1D4F9, 'M', 'p'), + (0x1D4FA, 'M', 'q'), + (0x1D4FB, 'M', 'r'), + ] + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D4FC, 'M', 's'), + (0x1D4FD, 'M', 't'), + (0x1D4FE, 'M', 'u'), + (0x1D4FF, 'M', 'v'), + (0x1D500, 'M', 'w'), + (0x1D501, 'M', 'x'), + (0x1D502, 'M', 'y'), + (0x1D503, 'M', 'z'), + (0x1D504, 'M', 'a'), + (0x1D505, 'M', 'b'), + (0x1D506, 'X'), + (0x1D507, 'M', 'd'), + (0x1D508, 'M', 'e'), + (0x1D509, 'M', 'f'), + (0x1D50A, 'M', 'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', 'j'), + (0x1D50E, 'M', 'k'), + (0x1D50F, 'M', 'l'), + (0x1D510, 'M', 'm'), + (0x1D511, 'M', 'n'), + (0x1D512, 'M', 'o'), + (0x1D513, 'M', 'p'), + (0x1D514, 'M', 
'q'), + (0x1D515, 'X'), + (0x1D516, 'M', 's'), + (0x1D517, 'M', 't'), + (0x1D518, 'M', 'u'), + (0x1D519, 'M', 'v'), + (0x1D51A, 'M', 'w'), + (0x1D51B, 'M', 'x'), + (0x1D51C, 'M', 'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', 'a'), + (0x1D51F, 'M', 'b'), + (0x1D520, 'M', 'c'), + (0x1D521, 'M', 'd'), + (0x1D522, 'M', 'e'), + (0x1D523, 'M', 'f'), + (0x1D524, 'M', 'g'), + (0x1D525, 'M', 'h'), + (0x1D526, 'M', 'i'), + (0x1D527, 'M', 'j'), + (0x1D528, 'M', 'k'), + (0x1D529, 'M', 'l'), + (0x1D52A, 'M', 'm'), + (0x1D52B, 'M', 'n'), + (0x1D52C, 'M', 'o'), + (0x1D52D, 'M', 'p'), + (0x1D52E, 'M', 'q'), + (0x1D52F, 'M', 'r'), + (0x1D530, 'M', 's'), + (0x1D531, 'M', 't'), + (0x1D532, 'M', 'u'), + (0x1D533, 'M', 'v'), + (0x1D534, 'M', 'w'), + (0x1D535, 'M', 'x'), + (0x1D536, 'M', 'y'), + (0x1D537, 'M', 'z'), + (0x1D538, 'M', 'a'), + (0x1D539, 'M', 'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', 'd'), + (0x1D53C, 'M', 'e'), + (0x1D53D, 'M', 'f'), + (0x1D53E, 'M', 'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', 'i'), + (0x1D541, 'M', 'j'), + (0x1D542, 'M', 'k'), + (0x1D543, 'M', 'l'), + (0x1D544, 'M', 'm'), + (0x1D545, 'X'), + (0x1D546, 'M', 'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', 's'), + (0x1D54B, 'M', 't'), + (0x1D54C, 'M', 'u'), + (0x1D54D, 'M', 'v'), + (0x1D54E, 'M', 'w'), + (0x1D54F, 'M', 'x'), + (0x1D550, 'M', 'y'), + (0x1D551, 'X'), + (0x1D552, 'M', 'a'), + (0x1D553, 'M', 'b'), + (0x1D554, 'M', 'c'), + (0x1D555, 'M', 'd'), + (0x1D556, 'M', 'e'), + (0x1D557, 'M', 'f'), + (0x1D558, 'M', 'g'), + (0x1D559, 'M', 'h'), + (0x1D55A, 'M', 'i'), + (0x1D55B, 'M', 'j'), + (0x1D55C, 'M', 'k'), + (0x1D55D, 'M', 'l'), + (0x1D55E, 'M', 'm'), + (0x1D55F, 'M', 'n'), + (0x1D560, 'M', 'o'), + (0x1D561, 'M', 'p'), + (0x1D562, 'M', 'q'), + ] + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D563, 'M', 'r'), + (0x1D564, 'M', 's'), + (0x1D565, 'M', 't'), + (0x1D566, 'M', 'u'), + (0x1D567, 'M', 'v'), + (0x1D568, 'M', 'w'), + (0x1D569, 'M', 'x'), + (0x1D56A, 'M', 'y'), + (0x1D56B, 'M', 'z'), + (0x1D56C, 'M', 'a'), + (0x1D56D, 'M', 'b'), + (0x1D56E, 'M', 'c'), + (0x1D56F, 'M', 'd'), + (0x1D570, 'M', 'e'), + (0x1D571, 'M', 'f'), + (0x1D572, 'M', 'g'), + (0x1D573, 'M', 'h'), + (0x1D574, 'M', 'i'), + (0x1D575, 'M', 'j'), + (0x1D576, 'M', 'k'), + (0x1D577, 'M', 'l'), + (0x1D578, 'M', 'm'), + (0x1D579, 'M', 'n'), + (0x1D57A, 'M', 'o'), + (0x1D57B, 'M', 'p'), + (0x1D57C, 'M', 'q'), + (0x1D57D, 'M', 'r'), + (0x1D57E, 'M', 's'), + (0x1D57F, 'M', 't'), + (0x1D580, 'M', 'u'), + (0x1D581, 'M', 'v'), + (0x1D582, 'M', 'w'), + (0x1D583, 'M', 'x'), + (0x1D584, 'M', 'y'), + (0x1D585, 'M', 'z'), + (0x1D586, 'M', 'a'), + (0x1D587, 'M', 'b'), + (0x1D588, 'M', 'c'), + (0x1D589, 'M', 'd'), + (0x1D58A, 'M', 'e'), + (0x1D58B, 'M', 'f'), + (0x1D58C, 'M', 'g'), + (0x1D58D, 'M', 'h'), + (0x1D58E, 'M', 'i'), + (0x1D58F, 'M', 'j'), + (0x1D590, 'M', 'k'), + (0x1D591, 'M', 'l'), + (0x1D592, 'M', 'm'), + (0x1D593, 'M', 'n'), + (0x1D594, 'M', 'o'), + (0x1D595, 'M', 'p'), + (0x1D596, 'M', 'q'), + (0x1D597, 'M', 'r'), + (0x1D598, 'M', 's'), + (0x1D599, 'M', 't'), + (0x1D59A, 'M', 'u'), + (0x1D59B, 'M', 'v'), + (0x1D59C, 'M', 'w'), + (0x1D59D, 'M', 'x'), + (0x1D59E, 'M', 'y'), + (0x1D59F, 'M', 'z'), + (0x1D5A0, 'M', 'a'), + (0x1D5A1, 'M', 'b'), + (0x1D5A2, 'M', 'c'), + (0x1D5A3, 'M', 'd'), + (0x1D5A4, 'M', 'e'), + (0x1D5A5, 'M', 'f'), + (0x1D5A6, 'M', 'g'), + (0x1D5A7, 'M', 'h'), + (0x1D5A8, 'M', 'i'), + (0x1D5A9, 'M', 'j'), + (0x1D5AA, 'M', 'k'), + (0x1D5AB, 'M', 'l'), + (0x1D5AC, 'M', 'm'), + (0x1D5AD, 'M', 'n'), + (0x1D5AE, 'M', 'o'), 
+ (0x1D5AF, 'M', 'p'), + (0x1D5B0, 'M', 'q'), + (0x1D5B1, 'M', 'r'), + (0x1D5B2, 'M', 's'), + (0x1D5B3, 'M', 't'), + (0x1D5B4, 'M', 'u'), + (0x1D5B5, 'M', 'v'), + (0x1D5B6, 'M', 'w'), + (0x1D5B7, 'M', 'x'), + (0x1D5B8, 'M', 'y'), + (0x1D5B9, 'M', 'z'), + (0x1D5BA, 'M', 'a'), + (0x1D5BB, 'M', 'b'), + (0x1D5BC, 'M', 'c'), + (0x1D5BD, 'M', 'd'), + (0x1D5BE, 'M', 'e'), + (0x1D5BF, 'M', 'f'), + (0x1D5C0, 'M', 'g'), + (0x1D5C1, 'M', 'h'), + (0x1D5C2, 'M', 'i'), + (0x1D5C3, 'M', 'j'), + (0x1D5C4, 'M', 'k'), + (0x1D5C5, 'M', 'l'), + (0x1D5C6, 'M', 'm'), + ] + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D5C7, 'M', 'n'), + (0x1D5C8, 'M', 'o'), + (0x1D5C9, 'M', 'p'), + (0x1D5CA, 'M', 'q'), + (0x1D5CB, 'M', 'r'), + (0x1D5CC, 'M', 's'), + (0x1D5CD, 'M', 't'), + (0x1D5CE, 'M', 'u'), + (0x1D5CF, 'M', 'v'), + (0x1D5D0, 'M', 'w'), + (0x1D5D1, 'M', 'x'), + (0x1D5D2, 'M', 'y'), + (0x1D5D3, 'M', 'z'), + (0x1D5D4, 'M', 'a'), + (0x1D5D5, 'M', 'b'), + (0x1D5D6, 'M', 'c'), + (0x1D5D7, 'M', 'd'), + (0x1D5D8, 'M', 'e'), + (0x1D5D9, 'M', 'f'), + (0x1D5DA, 'M', 'g'), + (0x1D5DB, 'M', 'h'), + (0x1D5DC, 'M', 'i'), + (0x1D5DD, 'M', 'j'), + (0x1D5DE, 'M', 'k'), + (0x1D5DF, 'M', 'l'), + (0x1D5E0, 'M', 'm'), + (0x1D5E1, 'M', 'n'), + (0x1D5E2, 'M', 'o'), + (0x1D5E3, 'M', 'p'), + (0x1D5E4, 'M', 'q'), + (0x1D5E5, 'M', 'r'), + (0x1D5E6, 'M', 's'), + (0x1D5E7, 'M', 't'), + (0x1D5E8, 'M', 'u'), + (0x1D5E9, 'M', 'v'), + (0x1D5EA, 'M', 'w'), + (0x1D5EB, 'M', 'x'), + (0x1D5EC, 'M', 'y'), + (0x1D5ED, 'M', 'z'), + (0x1D5EE, 'M', 'a'), + (0x1D5EF, 'M', 'b'), + (0x1D5F0, 'M', 'c'), + (0x1D5F1, 'M', 'd'), + (0x1D5F2, 'M', 'e'), + (0x1D5F3, 'M', 'f'), + (0x1D5F4, 'M', 'g'), + (0x1D5F5, 'M', 'h'), + (0x1D5F6, 'M', 'i'), + (0x1D5F7, 'M', 'j'), + (0x1D5F8, 'M', 'k'), + (0x1D5F9, 'M', 'l'), + (0x1D5FA, 'M', 'm'), + (0x1D5FB, 'M', 'n'), + (0x1D5FC, 'M', 'o'), + (0x1D5FD, 'M', 'p'), + (0x1D5FE, 'M', 'q'), + (0x1D5FF, 'M', 'r'), + (0x1D600, 'M', 's'), + (0x1D601, 'M', 't'), + (0x1D602, 'M', 'u'), + (0x1D603, 'M', 'v'), + (0x1D604, 'M', 'w'), + (0x1D605, 'M', 'x'), + (0x1D606, 'M', 'y'), + (0x1D607, 'M', 'z'), + (0x1D608, 'M', 'a'), + (0x1D609, 'M', 'b'), + (0x1D60A, 'M', 'c'), + (0x1D60B, 'M', 'd'), + (0x1D60C, 'M', 'e'), + (0x1D60D, 'M', 'f'), + (0x1D60E, 'M', 'g'), + (0x1D60F, 'M', 'h'), + (0x1D610, 'M', 'i'), + (0x1D611, 'M', 'j'), + (0x1D612, 'M', 'k'), + (0x1D613, 'M', 'l'), + (0x1D614, 'M', 'm'), + (0x1D615, 'M', 'n'), + (0x1D616, 'M', 'o'), + (0x1D617, 'M', 'p'), + (0x1D618, 'M', 'q'), + (0x1D619, 'M', 'r'), + (0x1D61A, 'M', 's'), + (0x1D61B, 'M', 't'), + (0x1D61C, 'M', 'u'), + (0x1D61D, 'M', 'v'), + (0x1D61E, 'M', 'w'), + (0x1D61F, 'M', 'x'), + (0x1D620, 'M', 'y'), + (0x1D621, 'M', 'z'), + (0x1D622, 'M', 'a'), + (0x1D623, 'M', 'b'), + (0x1D624, 'M', 'c'), + (0x1D625, 'M', 'd'), + (0x1D626, 'M', 'e'), + (0x1D627, 'M', 'f'), + (0x1D628, 'M', 'g'), + (0x1D629, 'M', 'h'), + (0x1D62A, 'M', 'i'), + ] + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D62B, 'M', 'j'), + (0x1D62C, 'M', 'k'), + (0x1D62D, 'M', 'l'), + (0x1D62E, 'M', 'm'), + (0x1D62F, 'M', 'n'), + (0x1D630, 'M', 'o'), + (0x1D631, 'M', 'p'), + (0x1D632, 'M', 'q'), + (0x1D633, 'M', 'r'), + (0x1D634, 'M', 's'), + (0x1D635, 'M', 't'), + (0x1D636, 'M', 'u'), + (0x1D637, 'M', 'v'), + (0x1D638, 'M', 'w'), + (0x1D639, 'M', 'x'), + (0x1D63A, 'M', 'y'), + (0x1D63B, 'M', 'z'), + (0x1D63C, 'M', 'a'), + (0x1D63D, 'M', 'b'), + (0x1D63E, 'M', 'c'), + (0x1D63F, 'M', 'd'), + (0x1D640, 'M', 'e'), + (0x1D641, 'M', 'f'), 
+ (0x1D642, 'M', 'g'), + (0x1D643, 'M', 'h'), + (0x1D644, 'M', 'i'), + (0x1D645, 'M', 'j'), + (0x1D646, 'M', 'k'), + (0x1D647, 'M', 'l'), + (0x1D648, 'M', 'm'), + (0x1D649, 'M', 'n'), + (0x1D64A, 'M', 'o'), + (0x1D64B, 'M', 'p'), + (0x1D64C, 'M', 'q'), + (0x1D64D, 'M', 'r'), + (0x1D64E, 'M', 's'), + (0x1D64F, 'M', 't'), + (0x1D650, 'M', 'u'), + (0x1D651, 'M', 'v'), + (0x1D652, 'M', 'w'), + (0x1D653, 'M', 'x'), + (0x1D654, 'M', 'y'), + (0x1D655, 'M', 'z'), + (0x1D656, 'M', 'a'), + (0x1D657, 'M', 'b'), + (0x1D658, 'M', 'c'), + (0x1D659, 'M', 'd'), + (0x1D65A, 'M', 'e'), + (0x1D65B, 'M', 'f'), + (0x1D65C, 'M', 'g'), + (0x1D65D, 'M', 'h'), + (0x1D65E, 'M', 'i'), + (0x1D65F, 'M', 'j'), + (0x1D660, 'M', 'k'), + (0x1D661, 'M', 'l'), + (0x1D662, 'M', 'm'), + (0x1D663, 'M', 'n'), + (0x1D664, 'M', 'o'), + (0x1D665, 'M', 'p'), + (0x1D666, 'M', 'q'), + (0x1D667, 'M', 'r'), + (0x1D668, 'M', 's'), + (0x1D669, 'M', 't'), + (0x1D66A, 'M', 'u'), + (0x1D66B, 'M', 'v'), + (0x1D66C, 'M', 'w'), + (0x1D66D, 'M', 'x'), + (0x1D66E, 'M', 'y'), + (0x1D66F, 'M', 'z'), + (0x1D670, 'M', 'a'), + (0x1D671, 'M', 'b'), + (0x1D672, 'M', 'c'), + (0x1D673, 'M', 'd'), + (0x1D674, 'M', 'e'), + (0x1D675, 'M', 'f'), + (0x1D676, 'M', 'g'), + (0x1D677, 'M', 'h'), + (0x1D678, 'M', 'i'), + (0x1D679, 'M', 'j'), + (0x1D67A, 'M', 'k'), + (0x1D67B, 'M', 'l'), + (0x1D67C, 'M', 'm'), + (0x1D67D, 'M', 'n'), + (0x1D67E, 'M', 'o'), + (0x1D67F, 'M', 'p'), + (0x1D680, 'M', 'q'), + (0x1D681, 'M', 'r'), + (0x1D682, 'M', 's'), + (0x1D683, 'M', 't'), + (0x1D684, 'M', 'u'), + (0x1D685, 'M', 'v'), + (0x1D686, 'M', 'w'), + (0x1D687, 'M', 'x'), + (0x1D688, 'M', 'y'), + (0x1D689, 'M', 'z'), + (0x1D68A, 'M', 'a'), + (0x1D68B, 'M', 'b'), + (0x1D68C, 'M', 'c'), + (0x1D68D, 'M', 'd'), + (0x1D68E, 'M', 'e'), + ] + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D68F, 'M', 'f'), + (0x1D690, 'M', 'g'), + (0x1D691, 'M', 'h'), + (0x1D692, 'M', 'i'), + (0x1D693, 'M', 'j'), + (0x1D694, 'M', 'k'), + (0x1D695, 'M', 'l'), + (0x1D696, 'M', 'm'), + (0x1D697, 'M', 'n'), + (0x1D698, 'M', 'o'), + (0x1D699, 'M', 'p'), + (0x1D69A, 'M', 'q'), + (0x1D69B, 'M', 'r'), + (0x1D69C, 'M', 's'), + (0x1D69D, 'M', 't'), + (0x1D69E, 'M', 'u'), + (0x1D69F, 'M', 'v'), + (0x1D6A0, 'M', 'w'), + (0x1D6A1, 'M', 'x'), + (0x1D6A2, 'M', 'y'), + (0x1D6A3, 'M', 'z'), + (0x1D6A4, 'M', 'ı'), + (0x1D6A5, 'M', 'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', 'α'), + (0x1D6A9, 'M', 'β'), + (0x1D6AA, 'M', 'γ'), + (0x1D6AB, 'M', 'δ'), + (0x1D6AC, 'M', 'ε'), + (0x1D6AD, 'M', 'ζ'), + (0x1D6AE, 'M', 'η'), + (0x1D6AF, 'M', 'θ'), + (0x1D6B0, 'M', 'ι'), + (0x1D6B1, 'M', 'κ'), + (0x1D6B2, 'M', 'λ'), + (0x1D6B3, 'M', 'μ'), + (0x1D6B4, 'M', 'ν'), + (0x1D6B5, 'M', 'ξ'), + (0x1D6B6, 'M', 'ο'), + (0x1D6B7, 'M', 'π'), + (0x1D6B8, 'M', 'ρ'), + (0x1D6B9, 'M', 'θ'), + (0x1D6BA, 'M', 'σ'), + (0x1D6BB, 'M', 'τ'), + (0x1D6BC, 'M', 'υ'), + (0x1D6BD, 'M', 'φ'), + (0x1D6BE, 'M', 'χ'), + (0x1D6BF, 'M', 'ψ'), + (0x1D6C0, 'M', 'ω'), + (0x1D6C1, 'M', '∇'), + (0x1D6C2, 'M', 'α'), + (0x1D6C3, 'M', 'β'), + (0x1D6C4, 'M', 'γ'), + (0x1D6C5, 'M', 'δ'), + (0x1D6C6, 'M', 'ε'), + (0x1D6C7, 'M', 'ζ'), + (0x1D6C8, 'M', 'η'), + (0x1D6C9, 'M', 'θ'), + (0x1D6CA, 'M', 'ι'), + (0x1D6CB, 'M', 'κ'), + (0x1D6CC, 'M', 'λ'), + (0x1D6CD, 'M', 'μ'), + (0x1D6CE, 'M', 'ν'), + (0x1D6CF, 'M', 'ξ'), + (0x1D6D0, 'M', 'ο'), + (0x1D6D1, 'M', 'π'), + (0x1D6D2, 'M', 'ρ'), + (0x1D6D3, 'M', 'σ'), + (0x1D6D5, 'M', 'τ'), + (0x1D6D6, 'M', 'υ'), + (0x1D6D7, 'M', 'φ'), + (0x1D6D8, 'M', 'χ'), + (0x1D6D9, 'M', 'ψ'), + (0x1D6DA, 'M', 'ω'), 
+ (0x1D6DB, 'M', '∂'), + (0x1D6DC, 'M', 'ε'), + (0x1D6DD, 'M', 'θ'), + (0x1D6DE, 'M', 'κ'), + (0x1D6DF, 'M', 'φ'), + (0x1D6E0, 'M', 'ρ'), + (0x1D6E1, 'M', 'π'), + (0x1D6E2, 'M', 'α'), + (0x1D6E3, 'M', 'β'), + (0x1D6E4, 'M', 'γ'), + (0x1D6E5, 'M', 'δ'), + (0x1D6E6, 'M', 'ε'), + (0x1D6E7, 'M', 'ζ'), + (0x1D6E8, 'M', 'η'), + (0x1D6E9, 'M', 'θ'), + (0x1D6EA, 'M', 'ι'), + (0x1D6EB, 'M', 'κ'), + (0x1D6EC, 'M', 'λ'), + (0x1D6ED, 'M', 'μ'), + (0x1D6EE, 'M', 'ν'), + (0x1D6EF, 'M', 'ξ'), + (0x1D6F0, 'M', 'ο'), + (0x1D6F1, 'M', 'π'), + (0x1D6F2, 'M', 'ρ'), + (0x1D6F3, 'M', 'θ'), + (0x1D6F4, 'M', 'σ'), + ] + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D6F5, 'M', 'τ'), + (0x1D6F6, 'M', 'υ'), + (0x1D6F7, 'M', 'φ'), + (0x1D6F8, 'M', 'χ'), + (0x1D6F9, 'M', 'ψ'), + (0x1D6FA, 'M', 'ω'), + (0x1D6FB, 'M', '∇'), + (0x1D6FC, 'M', 'α'), + (0x1D6FD, 'M', 'β'), + (0x1D6FE, 'M', 'γ'), + (0x1D6FF, 'M', 'δ'), + (0x1D700, 'M', 'ε'), + (0x1D701, 'M', 'ζ'), + (0x1D702, 'M', 'η'), + (0x1D703, 'M', 'θ'), + (0x1D704, 'M', 'ι'), + (0x1D705, 'M', 'κ'), + (0x1D706, 'M', 'λ'), + (0x1D707, 'M', 'μ'), + (0x1D708, 'M', 'ν'), + (0x1D709, 'M', 'ξ'), + (0x1D70A, 'M', 'ο'), + (0x1D70B, 'M', 'π'), + (0x1D70C, 'M', 'ρ'), + (0x1D70D, 'M', 'σ'), + (0x1D70F, 'M', 'τ'), + (0x1D710, 'M', 'υ'), + (0x1D711, 'M', 'φ'), + (0x1D712, 'M', 'χ'), + (0x1D713, 'M', 'ψ'), + (0x1D714, 'M', 'ω'), + (0x1D715, 'M', '∂'), + (0x1D716, 'M', 'ε'), + (0x1D717, 'M', 'θ'), + (0x1D718, 'M', 'κ'), + (0x1D719, 'M', 'φ'), + (0x1D71A, 'M', 'ρ'), + (0x1D71B, 'M', 'π'), + (0x1D71C, 'M', 'α'), + (0x1D71D, 'M', 'β'), + (0x1D71E, 'M', 'γ'), + (0x1D71F, 'M', 'δ'), + (0x1D720, 'M', 'ε'), + (0x1D721, 'M', 'ζ'), + (0x1D722, 'M', 'η'), + (0x1D723, 'M', 'θ'), + (0x1D724, 'M', 'ι'), + (0x1D725, 'M', 'κ'), + (0x1D726, 'M', 'λ'), + (0x1D727, 'M', 'μ'), + (0x1D728, 'M', 'ν'), + (0x1D729, 'M', 'ξ'), + (0x1D72A, 'M', 'ο'), + (0x1D72B, 'M', 'π'), + (0x1D72C, 'M', 'ρ'), + (0x1D72D, 'M', 'θ'), + (0x1D72E, 'M', 'σ'), + (0x1D72F, 'M', 'τ'), + (0x1D730, 'M', 'υ'), + (0x1D731, 'M', 'φ'), + (0x1D732, 'M', 'χ'), + (0x1D733, 'M', 'ψ'), + (0x1D734, 'M', 'ω'), + (0x1D735, 'M', '∇'), + (0x1D736, 'M', 'α'), + (0x1D737, 'M', 'β'), + (0x1D738, 'M', 'γ'), + (0x1D739, 'M', 'δ'), + (0x1D73A, 'M', 'ε'), + (0x1D73B, 'M', 'ζ'), + (0x1D73C, 'M', 'η'), + (0x1D73D, 'M', 'θ'), + (0x1D73E, 'M', 'ι'), + (0x1D73F, 'M', 'κ'), + (0x1D740, 'M', 'λ'), + (0x1D741, 'M', 'μ'), + (0x1D742, 'M', 'ν'), + (0x1D743, 'M', 'ξ'), + (0x1D744, 'M', 'ο'), + (0x1D745, 'M', 'π'), + (0x1D746, 'M', 'ρ'), + (0x1D747, 'M', 'σ'), + (0x1D749, 'M', 'τ'), + (0x1D74A, 'M', 'υ'), + (0x1D74B, 'M', 'φ'), + (0x1D74C, 'M', 'χ'), + (0x1D74D, 'M', 'ψ'), + (0x1D74E, 'M', 'ω'), + (0x1D74F, 'M', '∂'), + (0x1D750, 'M', 'ε'), + (0x1D751, 'M', 'θ'), + (0x1D752, 'M', 'κ'), + (0x1D753, 'M', 'φ'), + (0x1D754, 'M', 'ρ'), + (0x1D755, 'M', 'π'), + (0x1D756, 'M', 'α'), + (0x1D757, 'M', 'β'), + (0x1D758, 'M', 'γ'), + (0x1D759, 'M', 'δ'), + (0x1D75A, 'M', 'ε'), + ] + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D75B, 'M', 'ζ'), + (0x1D75C, 'M', 'η'), + (0x1D75D, 'M', 'θ'), + (0x1D75E, 'M', 'ι'), + (0x1D75F, 'M', 'κ'), + (0x1D760, 'M', 'λ'), + (0x1D761, 'M', 'μ'), + (0x1D762, 'M', 'ν'), + (0x1D763, 'M', 'ξ'), + (0x1D764, 'M', 'ο'), + (0x1D765, 'M', 'π'), + (0x1D766, 'M', 'ρ'), + (0x1D767, 'M', 'θ'), + (0x1D768, 'M', 'σ'), + (0x1D769, 'M', 'τ'), + (0x1D76A, 'M', 'υ'), + (0x1D76B, 'M', 'φ'), + (0x1D76C, 'M', 'χ'), + (0x1D76D, 'M', 'ψ'), + (0x1D76E, 'M', 'ω'), + (0x1D76F, 'M', '∇'), 
+ (0x1D770, 'M', 'α'), + (0x1D771, 'M', 'β'), + (0x1D772, 'M', 'γ'), + (0x1D773, 'M', 'δ'), + (0x1D774, 'M', 'ε'), + (0x1D775, 'M', 'ζ'), + (0x1D776, 'M', 'η'), + (0x1D777, 'M', 'θ'), + (0x1D778, 'M', 'ι'), + (0x1D779, 'M', 'κ'), + (0x1D77A, 'M', 'λ'), + (0x1D77B, 'M', 'μ'), + (0x1D77C, 'M', 'ν'), + (0x1D77D, 'M', 'ξ'), + (0x1D77E, 'M', 'ο'), + (0x1D77F, 'M', 'π'), + (0x1D780, 'M', 'ρ'), + (0x1D781, 'M', 'σ'), + (0x1D783, 'M', 'τ'), + (0x1D784, 'M', 'υ'), + (0x1D785, 'M', 'φ'), + (0x1D786, 'M', 'χ'), + (0x1D787, 'M', 'ψ'), + (0x1D788, 'M', 'ω'), + (0x1D789, 'M', '∂'), + (0x1D78A, 'M', 'ε'), + (0x1D78B, 'M', 'θ'), + (0x1D78C, 'M', 'κ'), + (0x1D78D, 'M', 'φ'), + (0x1D78E, 'M', 'ρ'), + (0x1D78F, 'M', 'π'), + (0x1D790, 'M', 'α'), + (0x1D791, 'M', 'β'), + (0x1D792, 'M', 'γ'), + (0x1D793, 'M', 'δ'), + (0x1D794, 'M', 'ε'), + (0x1D795, 'M', 'ζ'), + (0x1D796, 'M', 'η'), + (0x1D797, 'M', 'θ'), + (0x1D798, 'M', 'ι'), + (0x1D799, 'M', 'κ'), + (0x1D79A, 'M', 'λ'), + (0x1D79B, 'M', 'μ'), + (0x1D79C, 'M', 'ν'), + (0x1D79D, 'M', 'ξ'), + (0x1D79E, 'M', 'ο'), + (0x1D79F, 'M', 'π'), + (0x1D7A0, 'M', 'ρ'), + (0x1D7A1, 'M', 'θ'), + (0x1D7A2, 'M', 'σ'), + (0x1D7A3, 'M', 'τ'), + (0x1D7A4, 'M', 'υ'), + (0x1D7A5, 'M', 'φ'), + (0x1D7A6, 'M', 'χ'), + (0x1D7A7, 'M', 'ψ'), + (0x1D7A8, 'M', 'ω'), + (0x1D7A9, 'M', '∇'), + (0x1D7AA, 'M', 'α'), + (0x1D7AB, 'M', 'β'), + (0x1D7AC, 'M', 'γ'), + (0x1D7AD, 'M', 'δ'), + (0x1D7AE, 'M', 'ε'), + (0x1D7AF, 'M', 'ζ'), + (0x1D7B0, 'M', 'η'), + (0x1D7B1, 'M', 'θ'), + (0x1D7B2, 'M', 'ι'), + (0x1D7B3, 'M', 'κ'), + (0x1D7B4, 'M', 'λ'), + (0x1D7B5, 'M', 'μ'), + (0x1D7B6, 'M', 'ν'), + (0x1D7B7, 'M', 'ξ'), + (0x1D7B8, 'M', 'ο'), + (0x1D7B9, 'M', 'π'), + (0x1D7BA, 'M', 'ρ'), + (0x1D7BB, 'M', 'σ'), + (0x1D7BD, 'M', 'τ'), + (0x1D7BE, 'M', 'υ'), + (0x1D7BF, 'M', 'φ'), + (0x1D7C0, 'M', 'χ'), + ] + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D7C1, 'M', 'ψ'), + (0x1D7C2, 'M', 'ω'), + (0x1D7C3, 'M', '∂'), + (0x1D7C4, 'M', 'ε'), + (0x1D7C5, 'M', 'θ'), + (0x1D7C6, 'M', 'κ'), + (0x1D7C7, 'M', 'φ'), + (0x1D7C8, 'M', 'ρ'), + (0x1D7C9, 'M', 'π'), + (0x1D7CA, 'M', 'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', '0'), + (0x1D7CF, 'M', '1'), + (0x1D7D0, 'M', '2'), + (0x1D7D1, 'M', '3'), + (0x1D7D2, 'M', '4'), + (0x1D7D3, 'M', '5'), + (0x1D7D4, 'M', '6'), + (0x1D7D5, 'M', '7'), + (0x1D7D6, 'M', '8'), + (0x1D7D7, 'M', '9'), + (0x1D7D8, 'M', '0'), + (0x1D7D9, 'M', '1'), + (0x1D7DA, 'M', '2'), + (0x1D7DB, 'M', '3'), + (0x1D7DC, 'M', '4'), + (0x1D7DD, 'M', '5'), + (0x1D7DE, 'M', '6'), + (0x1D7DF, 'M', '7'), + (0x1D7E0, 'M', '8'), + (0x1D7E1, 'M', '9'), + (0x1D7E2, 'M', '0'), + (0x1D7E3, 'M', '1'), + (0x1D7E4, 'M', '2'), + (0x1D7E5, 'M', '3'), + (0x1D7E6, 'M', '4'), + (0x1D7E7, 'M', '5'), + (0x1D7E8, 'M', '6'), + (0x1D7E9, 'M', '7'), + (0x1D7EA, 'M', '8'), + (0x1D7EB, 'M', '9'), + (0x1D7EC, 'M', '0'), + (0x1D7ED, 'M', '1'), + (0x1D7EE, 'M', '2'), + (0x1D7EF, 'M', '3'), + (0x1D7F0, 'M', '4'), + (0x1D7F1, 'M', '5'), + (0x1D7F2, 'M', '6'), + (0x1D7F3, 'M', '7'), + (0x1D7F4, 'M', '8'), + (0x1D7F5, 'M', '9'), + (0x1D7F6, 'M', '0'), + (0x1D7F7, 'M', '1'), + (0x1D7F8, 'M', '2'), + (0x1D7F9, 'M', '3'), + (0x1D7FA, 'M', '4'), + (0x1D7FB, 'M', '5'), + (0x1D7FC, 'M', '6'), + (0x1D7FD, 'M', '7'), + (0x1D7FE, 'M', '8'), + (0x1D7FF, 'M', '9'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1DF00, 'V'), + (0x1DF1F, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), 
+ (0x1E023, 'V'), + (0x1E025, 'X'), + (0x1E026, 'V'), + (0x1E02B, 'X'), + (0x1E100, 'V'), + (0x1E12D, 'X'), + (0x1E130, 'V'), + (0x1E13E, 'X'), + (0x1E140, 'V'), + (0x1E14A, 'X'), + (0x1E14E, 'V'), + (0x1E150, 'X'), + (0x1E290, 'V'), + (0x1E2AF, 'X'), + (0x1E2C0, 'V'), + (0x1E2FA, 'X'), + (0x1E2FF, 'V'), + (0x1E300, 'X'), + (0x1E7E0, 'V'), + (0x1E7E7, 'X'), + (0x1E7E8, 'V'), + (0x1E7EC, 'X'), + (0x1E7ED, 'V'), + (0x1E7EF, 'X'), + (0x1E7F0, 'V'), + ] + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E7FF, 'X'), + (0x1E800, 'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', '𞤢'), + (0x1E901, 'M', '𞤣'), + (0x1E902, 'M', '𞤤'), + (0x1E903, 'M', '𞤥'), + (0x1E904, 'M', '𞤦'), + (0x1E905, 'M', '𞤧'), + (0x1E906, 'M', '𞤨'), + (0x1E907, 'M', '𞤩'), + (0x1E908, 'M', '𞤪'), + (0x1E909, 'M', '𞤫'), + (0x1E90A, 'M', '𞤬'), + (0x1E90B, 'M', '𞤭'), + (0x1E90C, 'M', '𞤮'), + (0x1E90D, 'M', '𞤯'), + (0x1E90E, 'M', '𞤰'), + (0x1E90F, 'M', '𞤱'), + (0x1E910, 'M', '𞤲'), + (0x1E911, 'M', '𞤳'), + (0x1E912, 'M', '𞤴'), + (0x1E913, 'M', '𞤵'), + (0x1E914, 'M', '𞤶'), + (0x1E915, 'M', '𞤷'), + (0x1E916, 'M', '𞤸'), + (0x1E917, 'M', '𞤹'), + (0x1E918, 'M', '𞤺'), + (0x1E919, 'M', '𞤻'), + (0x1E91A, 'M', '𞤼'), + (0x1E91B, 'M', '𞤽'), + (0x1E91C, 'M', '𞤾'), + (0x1E91D, 'M', '𞤿'), + (0x1E91E, 'M', '𞥀'), + (0x1E91F, 'M', '𞥁'), + (0x1E920, 'M', '𞥂'), + (0x1E921, 'M', '𞥃'), + (0x1E922, 'V'), + (0x1E94C, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + (0x1E95E, 'V'), + (0x1E960, 'X'), + (0x1EC71, 'V'), + (0x1ECB5, 'X'), + (0x1ED01, 'V'), + (0x1ED3E, 'X'), + (0x1EE00, 'M', 'ا'), + (0x1EE01, 'M', 'ب'), + (0x1EE02, 'M', 'ج'), + (0x1EE03, 'M', 'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', 'و'), + (0x1EE06, 'M', 'ز'), + (0x1EE07, 'M', 'ح'), + (0x1EE08, 'M', 'ط'), + (0x1EE09, 'M', 'ي'), + (0x1EE0A, 'M', 'ك'), + (0x1EE0B, 'M', 'ل'), + (0x1EE0C, 'M', 'م'), + (0x1EE0D, 'M', 'ن'), + (0x1EE0E, 'M', 'س'), + (0x1EE0F, 'M', 'ع'), + (0x1EE10, 'M', 'ف'), + (0x1EE11, 'M', 'ص'), + (0x1EE12, 'M', 'ق'), + (0x1EE13, 'M', 'ر'), + (0x1EE14, 'M', 'ش'), + (0x1EE15, 'M', 'ت'), + (0x1EE16, 'M', 'ث'), + (0x1EE17, 'M', 'خ'), + (0x1EE18, 'M', 'ذ'), + (0x1EE19, 'M', 'ض'), + (0x1EE1A, 'M', 'ظ'), + (0x1EE1B, 'M', 'غ'), + (0x1EE1C, 'M', 'ٮ'), + (0x1EE1D, 'M', 'ں'), + (0x1EE1E, 'M', 'ڡ'), + (0x1EE1F, 'M', 'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', 'ب'), + (0x1EE22, 'M', 'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', 'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', 'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', 'ي'), + (0x1EE2A, 'M', 'ك'), + (0x1EE2B, 'M', 'ل'), + (0x1EE2C, 'M', 'م'), + (0x1EE2D, 'M', 'ن'), + (0x1EE2E, 'M', 'س'), + (0x1EE2F, 'M', 'ع'), + (0x1EE30, 'M', 'ف'), + (0x1EE31, 'M', 'ص'), + (0x1EE32, 'M', 'ق'), + (0x1EE33, 'X'), + ] + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE34, 'M', 'ش'), + (0x1EE35, 'M', 'ت'), + (0x1EE36, 'M', 'ث'), + (0x1EE37, 'M', 'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', 'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', 'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', 'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', 'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', 'ي'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', 'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', 'ن'), + (0x1EE4E, 'M', 'س'), + (0x1EE4F, 'M', 'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', 'ص'), + (0x1EE52, 'M', 'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', 'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', 'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', 'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', 'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', 'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', 'ٯ'), 
+ (0x1EE60, 'X'), + (0x1EE61, 'M', 'ب'), + (0x1EE62, 'M', 'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', 'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', 'ح'), + (0x1EE68, 'M', 'ط'), + (0x1EE69, 'M', 'ي'), + (0x1EE6A, 'M', 'ك'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', 'م'), + (0x1EE6D, 'M', 'ن'), + (0x1EE6E, 'M', 'س'), + (0x1EE6F, 'M', 'ع'), + (0x1EE70, 'M', 'ف'), + (0x1EE71, 'M', 'ص'), + (0x1EE72, 'M', 'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', 'ش'), + (0x1EE75, 'M', 'ت'), + (0x1EE76, 'M', 'ث'), + (0x1EE77, 'M', 'خ'), + (0x1EE78, 'X'), + (0x1EE79, 'M', 'ض'), + (0x1EE7A, 'M', 'ظ'), + (0x1EE7B, 'M', 'غ'), + (0x1EE7C, 'M', 'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', 'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', 'ا'), + (0x1EE81, 'M', 'ب'), + (0x1EE82, 'M', 'ج'), + (0x1EE83, 'M', 'د'), + (0x1EE84, 'M', 'ه'), + (0x1EE85, 'M', 'و'), + (0x1EE86, 'M', 'ز'), + (0x1EE87, 'M', 'ح'), + (0x1EE88, 'M', 'ط'), + (0x1EE89, 'M', 'ي'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', 'ل'), + (0x1EE8C, 'M', 'م'), + (0x1EE8D, 'M', 'ن'), + (0x1EE8E, 'M', 'س'), + (0x1EE8F, 'M', 'ع'), + (0x1EE90, 'M', 'ف'), + (0x1EE91, 'M', 'ص'), + (0x1EE92, 'M', 'ق'), + (0x1EE93, 'M', 'ر'), + (0x1EE94, 'M', 'ش'), + (0x1EE95, 'M', 'ت'), + (0x1EE96, 'M', 'ث'), + (0x1EE97, 'M', 'خ'), + (0x1EE98, 'M', 'ذ'), + (0x1EE99, 'M', 'ض'), + (0x1EE9A, 'M', 'ظ'), + (0x1EE9B, 'M', 'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', 'ب'), + (0x1EEA2, 'M', 'ج'), + (0x1EEA3, 'M', 'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', 'و'), + ] + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EEA6, 'M', 'ز'), + (0x1EEA7, 'M', 'ح'), + (0x1EEA8, 'M', 'ط'), + (0x1EEA9, 'M', 'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', 'ل'), + (0x1EEAC, 'M', 'م'), + (0x1EEAD, 'M', 'ن'), + (0x1EEAE, 'M', 'س'), + (0x1EEAF, 'M', 'ع'), + (0x1EEB0, 'M', 'ف'), + (0x1EEB1, 'M', 'ص'), + (0x1EEB2, 'M', 'ق'), + (0x1EEB3, 'M', 'ر'), + (0x1EEB4, 'M', 'ش'), + (0x1EEB5, 'M', 'ت'), + (0x1EEB6, 'M', 'ث'), + (0x1EEB7, 'M', 'خ'), + (0x1EEB8, 'M', 'ذ'), + (0x1EEB9, 'M', 'ض'), + (0x1EEBA, 'M', 'ظ'), + (0x1EEBB, 'M', 'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0C0, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0F6, 'X'), + (0x1F101, '3', '0,'), + (0x1F102, '3', '1,'), + (0x1F103, '3', '2,'), + (0x1F104, '3', '3,'), + (0x1F105, '3', '4,'), + (0x1F106, '3', '5,'), + (0x1F107, '3', '6,'), + (0x1F108, '3', '7,'), + (0x1F109, '3', '8,'), + (0x1F10A, '3', '9,'), + (0x1F10B, 'V'), + (0x1F110, '3', '(a)'), + (0x1F111, '3', '(b)'), + (0x1F112, '3', '(c)'), + (0x1F113, '3', '(d)'), + (0x1F114, '3', '(e)'), + (0x1F115, '3', '(f)'), + (0x1F116, '3', '(g)'), + (0x1F117, '3', '(h)'), + (0x1F118, '3', '(i)'), + (0x1F119, '3', '(j)'), + (0x1F11A, '3', '(k)'), + (0x1F11B, '3', '(l)'), + (0x1F11C, '3', '(m)'), + (0x1F11D, '3', '(n)'), + (0x1F11E, '3', '(o)'), + (0x1F11F, '3', '(p)'), + (0x1F120, '3', '(q)'), + (0x1F121, '3', '(r)'), + (0x1F122, '3', '(s)'), + (0x1F123, '3', '(t)'), + (0x1F124, '3', '(u)'), + (0x1F125, '3', '(v)'), + (0x1F126, '3', '(w)'), + (0x1F127, '3', '(x)'), + (0x1F128, '3', '(y)'), + (0x1F129, '3', '(z)'), + (0x1F12A, 'M', '〔s〕'), + (0x1F12B, 'M', 'c'), + (0x1F12C, 'M', 'r'), + (0x1F12D, 'M', 'cd'), + (0x1F12E, 'M', 'wz'), + (0x1F12F, 'V'), + (0x1F130, 'M', 'a'), + (0x1F131, 'M', 'b'), + (0x1F132, 'M', 'c'), + (0x1F133, 'M', 'd'), + (0x1F134, 'M', 'e'), + (0x1F135, 'M', 'f'), + (0x1F136, 'M', 'g'), + (0x1F137, 'M', 'h'), + (0x1F138, 'M', 'i'), + 
(0x1F139, 'M', 'j'), + (0x1F13A, 'M', 'k'), + (0x1F13B, 'M', 'l'), + (0x1F13C, 'M', 'm'), + (0x1F13D, 'M', 'n'), + (0x1F13E, 'M', 'o'), + (0x1F13F, 'M', 'p'), + (0x1F140, 'M', 'q'), + (0x1F141, 'M', 'r'), + (0x1F142, 'M', 's'), + (0x1F143, 'M', 't'), + ] + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F144, 'M', 'u'), + (0x1F145, 'M', 'v'), + (0x1F146, 'M', 'w'), + (0x1F147, 'M', 'x'), + (0x1F148, 'M', 'y'), + (0x1F149, 'M', 'z'), + (0x1F14A, 'M', 'hv'), + (0x1F14B, 'M', 'mv'), + (0x1F14C, 'M', 'sd'), + (0x1F14D, 'M', 'ss'), + (0x1F14E, 'M', 'ppv'), + (0x1F14F, 'M', 'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', 'mc'), + (0x1F16B, 'M', 'md'), + (0x1F16C, 'M', 'mr'), + (0x1F16D, 'V'), + (0x1F190, 'M', 'dj'), + (0x1F191, 'V'), + (0x1F1AE, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', 'ほか'), + (0x1F201, 'M', 'ココ'), + (0x1F202, 'M', 'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', '手'), + (0x1F211, 'M', '字'), + (0x1F212, 'M', '双'), + (0x1F213, 'M', 'デ'), + (0x1F214, 'M', '二'), + (0x1F215, 'M', '多'), + (0x1F216, 'M', '解'), + (0x1F217, 'M', '天'), + (0x1F218, 'M', '交'), + (0x1F219, 'M', '映'), + (0x1F21A, 'M', '無'), + (0x1F21B, 'M', '料'), + (0x1F21C, 'M', '前'), + (0x1F21D, 'M', '後'), + (0x1F21E, 'M', '再'), + (0x1F21F, 'M', '新'), + (0x1F220, 'M', '初'), + (0x1F221, 'M', '終'), + (0x1F222, 'M', '生'), + (0x1F223, 'M', '販'), + (0x1F224, 'M', '声'), + (0x1F225, 'M', '吹'), + (0x1F226, 'M', '演'), + (0x1F227, 'M', '投'), + (0x1F228, 'M', '捕'), + (0x1F229, 'M', '一'), + (0x1F22A, 'M', '三'), + (0x1F22B, 'M', '遊'), + (0x1F22C, 'M', '左'), + (0x1F22D, 'M', '中'), + (0x1F22E, 'M', '右'), + (0x1F22F, 'M', '指'), + (0x1F230, 'M', '走'), + (0x1F231, 'M', '打'), + (0x1F232, 'M', '禁'), + (0x1F233, 'M', '空'), + (0x1F234, 'M', '合'), + (0x1F235, 'M', '満'), + (0x1F236, 'M', '有'), + (0x1F237, 'M', '月'), + (0x1F238, 'M', '申'), + (0x1F239, 'M', '割'), + (0x1F23A, 'M', '営'), + (0x1F23B, 'M', '配'), + (0x1F23C, 'X'), + (0x1F240, 'M', '〔本〕'), + (0x1F241, 'M', '〔三〕'), + (0x1F242, 'M', '〔二〕'), + (0x1F243, 'M', '〔安〕'), + (0x1F244, 'M', '〔点〕'), + (0x1F245, 'M', '〔打〕'), + (0x1F246, 'M', '〔盗〕'), + (0x1F247, 'M', '〔勝〕'), + (0x1F248, 'M', '〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', '得'), + (0x1F251, 'M', '可'), + (0x1F252, 'X'), + (0x1F260, 'V'), + (0x1F266, 'X'), + (0x1F300, 'V'), + (0x1F6D8, 'X'), + (0x1F6DD, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6FD, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x1F780, 'V'), + (0x1F7D9, 'X'), + (0x1F7E0, 'V'), + (0x1F7EC, 'X'), + (0x1F7F0, 'V'), + (0x1F7F1, 'X'), + (0x1F800, 'V'), + ] + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + (0x1F8B2, 'X'), + (0x1F900, 'V'), + (0x1FA54, 'X'), + (0x1FA60, 'V'), + (0x1FA6E, 'X'), + (0x1FA70, 'V'), + (0x1FA75, 'X'), + (0x1FA78, 'V'), + (0x1FA7D, 'X'), + (0x1FA80, 'V'), + (0x1FA87, 'X'), + (0x1FA90, 'V'), + (0x1FAAD, 'X'), + (0x1FAB0, 'V'), + (0x1FABB, 'X'), + (0x1FAC0, 'V'), + (0x1FAC6, 'X'), + (0x1FAD0, 'V'), + (0x1FADA, 'X'), + (0x1FAE0, 'V'), + (0x1FAE8, 'X'), + (0x1FAF0, 'V'), + (0x1FAF7, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', '0'), + (0x1FBF1, 'M', '1'), + (0x1FBF2, 'M', '2'), + (0x1FBF3, 'M', '3'), + (0x1FBF4, 'M', '4'), + (0x1FBF5, 'M', '5'), + (0x1FBF6, 'M', '6'), + (0x1FBF7, 'M', '7'), + (0x1FBF8, 'M', '8'), + (0x1FBF9, 'M', '9'), + (0x1FBFA, 'X'), + (0x20000, 
'V'), + (0x2A6E0, 'X'), + (0x2A700, 'V'), + (0x2B739, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), + (0x2F800, 'M', '丽'), + (0x2F801, 'M', '丸'), + (0x2F802, 'M', '乁'), + (0x2F803, 'M', '𠄢'), + (0x2F804, 'M', '你'), + (0x2F805, 'M', '侮'), + (0x2F806, 'M', '侻'), + (0x2F807, 'M', '倂'), + (0x2F808, 'M', '偺'), + (0x2F809, 'M', '備'), + (0x2F80A, 'M', '僧'), + (0x2F80B, 'M', '像'), + (0x2F80C, 'M', '㒞'), + (0x2F80D, 'M', '𠘺'), + (0x2F80E, 'M', '免'), + (0x2F80F, 'M', '兔'), + (0x2F810, 'M', '兤'), + (0x2F811, 'M', '具'), + (0x2F812, 'M', '𠔜'), + (0x2F813, 'M', '㒹'), + (0x2F814, 'M', '內'), + (0x2F815, 'M', '再'), + (0x2F816, 'M', '𠕋'), + (0x2F817, 'M', '冗'), + (0x2F818, 'M', '冤'), + (0x2F819, 'M', '仌'), + (0x2F81A, 'M', '冬'), + (0x2F81B, 'M', '况'), + (0x2F81C, 'M', '𩇟'), + (0x2F81D, 'M', '凵'), + (0x2F81E, 'M', '刃'), + (0x2F81F, 'M', '㓟'), + (0x2F820, 'M', '刻'), + (0x2F821, 'M', '剆'), + (0x2F822, 'M', '割'), + (0x2F823, 'M', '剷'), + (0x2F824, 'M', '㔕'), + (0x2F825, 'M', '勇'), + (0x2F826, 'M', '勉'), + (0x2F827, 'M', '勤'), + (0x2F828, 'M', '勺'), + (0x2F829, 'M', '包'), + ] + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F82A, 'M', '匆'), + (0x2F82B, 'M', '北'), + (0x2F82C, 'M', '卉'), + (0x2F82D, 'M', '卑'), + (0x2F82E, 'M', '博'), + (0x2F82F, 'M', '即'), + (0x2F830, 'M', '卽'), + (0x2F831, 'M', '卿'), + (0x2F834, 'M', '𠨬'), + (0x2F835, 'M', '灰'), + (0x2F836, 'M', '及'), + (0x2F837, 'M', '叟'), + (0x2F838, 'M', '𠭣'), + (0x2F839, 'M', '叫'), + (0x2F83A, 'M', '叱'), + (0x2F83B, 'M', '吆'), + (0x2F83C, 'M', '咞'), + (0x2F83D, 'M', '吸'), + (0x2F83E, 'M', '呈'), + (0x2F83F, 'M', '周'), + (0x2F840, 'M', '咢'), + (0x2F841, 'M', '哶'), + (0x2F842, 'M', '唐'), + (0x2F843, 'M', '啓'), + (0x2F844, 'M', '啣'), + (0x2F845, 'M', '善'), + (0x2F847, 'M', '喙'), + (0x2F848, 'M', '喫'), + (0x2F849, 'M', '喳'), + (0x2F84A, 'M', '嗂'), + (0x2F84B, 'M', '圖'), + (0x2F84C, 'M', '嘆'), + (0x2F84D, 'M', '圗'), + (0x2F84E, 'M', '噑'), + (0x2F84F, 'M', '噴'), + (0x2F850, 'M', '切'), + (0x2F851, 'M', '壮'), + (0x2F852, 'M', '城'), + (0x2F853, 'M', '埴'), + (0x2F854, 'M', '堍'), + (0x2F855, 'M', '型'), + (0x2F856, 'M', '堲'), + (0x2F857, 'M', '報'), + (0x2F858, 'M', '墬'), + (0x2F859, 'M', '𡓤'), + (0x2F85A, 'M', '売'), + (0x2F85B, 'M', '壷'), + (0x2F85C, 'M', '夆'), + (0x2F85D, 'M', '多'), + (0x2F85E, 'M', '夢'), + (0x2F85F, 'M', '奢'), + (0x2F860, 'M', '𡚨'), + (0x2F861, 'M', '𡛪'), + (0x2F862, 'M', '姬'), + (0x2F863, 'M', '娛'), + (0x2F864, 'M', '娧'), + (0x2F865, 'M', '姘'), + (0x2F866, 'M', '婦'), + (0x2F867, 'M', '㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', '嬈'), + (0x2F86A, 'M', '嬾'), + (0x2F86C, 'M', '𡧈'), + (0x2F86D, 'M', '寃'), + (0x2F86E, 'M', '寘'), + (0x2F86F, 'M', '寧'), + (0x2F870, 'M', '寳'), + (0x2F871, 'M', '𡬘'), + (0x2F872, 'M', '寿'), + (0x2F873, 'M', '将'), + (0x2F874, 'X'), + (0x2F875, 'M', '尢'), + (0x2F876, 'M', '㞁'), + (0x2F877, 'M', '屠'), + (0x2F878, 'M', '屮'), + (0x2F879, 'M', '峀'), + (0x2F87A, 'M', '岍'), + (0x2F87B, 'M', '𡷤'), + (0x2F87C, 'M', '嵃'), + (0x2F87D, 'M', '𡷦'), + (0x2F87E, 'M', '嵮'), + (0x2F87F, 'M', '嵫'), + (0x2F880, 'M', '嵼'), + (0x2F881, 'M', '巡'), + (0x2F882, 'M', '巢'), + (0x2F883, 'M', '㠯'), + (0x2F884, 'M', '巽'), + (0x2F885, 'M', '帨'), + (0x2F886, 'M', '帽'), + (0x2F887, 'M', '幩'), + (0x2F888, 'M', '㡢'), + (0x2F889, 'M', '𢆃'), + (0x2F88A, 'M', '㡼'), + (0x2F88B, 'M', '庰'), + (0x2F88C, 'M', '庳'), + (0x2F88D, 'M', '庶'), + (0x2F88E, 'M', '廊'), + (0x2F88F, 'M', '𪎒'), + (0x2F890, 'M', '廾'), + (0x2F891, 'M', '𢌱'), + ] + +def _seg_77() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F893, 'M', '舁'), + (0x2F894, 'M', '弢'), + (0x2F896, 'M', '㣇'), + (0x2F897, 'M', '𣊸'), + (0x2F898, 'M', '𦇚'), + (0x2F899, 'M', '形'), + (0x2F89A, 'M', '彫'), + (0x2F89B, 'M', '㣣'), + (0x2F89C, 'M', '徚'), + (0x2F89D, 'M', '忍'), + (0x2F89E, 'M', '志'), + (0x2F89F, 'M', '忹'), + (0x2F8A0, 'M', '悁'), + (0x2F8A1, 'M', '㤺'), + (0x2F8A2, 'M', '㤜'), + (0x2F8A3, 'M', '悔'), + (0x2F8A4, 'M', '𢛔'), + (0x2F8A5, 'M', '惇'), + (0x2F8A6, 'M', '慈'), + (0x2F8A7, 'M', '慌'), + (0x2F8A8, 'M', '慎'), + (0x2F8A9, 'M', '慌'), + (0x2F8AA, 'M', '慺'), + (0x2F8AB, 'M', '憎'), + (0x2F8AC, 'M', '憲'), + (0x2F8AD, 'M', '憤'), + (0x2F8AE, 'M', '憯'), + (0x2F8AF, 'M', '懞'), + (0x2F8B0, 'M', '懲'), + (0x2F8B1, 'M', '懶'), + (0x2F8B2, 'M', '成'), + (0x2F8B3, 'M', '戛'), + (0x2F8B4, 'M', '扝'), + (0x2F8B5, 'M', '抱'), + (0x2F8B6, 'M', '拔'), + (0x2F8B7, 'M', '捐'), + (0x2F8B8, 'M', '𢬌'), + (0x2F8B9, 'M', '挽'), + (0x2F8BA, 'M', '拼'), + (0x2F8BB, 'M', '捨'), + (0x2F8BC, 'M', '掃'), + (0x2F8BD, 'M', '揤'), + (0x2F8BE, 'M', '𢯱'), + (0x2F8BF, 'M', '搢'), + (0x2F8C0, 'M', '揅'), + (0x2F8C1, 'M', '掩'), + (0x2F8C2, 'M', '㨮'), + (0x2F8C3, 'M', '摩'), + (0x2F8C4, 'M', '摾'), + (0x2F8C5, 'M', '撝'), + (0x2F8C6, 'M', '摷'), + (0x2F8C7, 'M', '㩬'), + (0x2F8C8, 'M', '敏'), + (0x2F8C9, 'M', '敬'), + (0x2F8CA, 'M', '𣀊'), + (0x2F8CB, 'M', '旣'), + (0x2F8CC, 'M', '書'), + (0x2F8CD, 'M', '晉'), + (0x2F8CE, 'M', '㬙'), + (0x2F8CF, 'M', '暑'), + (0x2F8D0, 'M', '㬈'), + (0x2F8D1, 'M', '㫤'), + (0x2F8D2, 'M', '冒'), + (0x2F8D3, 'M', '冕'), + (0x2F8D4, 'M', '最'), + (0x2F8D5, 'M', '暜'), + (0x2F8D6, 'M', '肭'), + (0x2F8D7, 'M', '䏙'), + (0x2F8D8, 'M', '朗'), + (0x2F8D9, 'M', '望'), + (0x2F8DA, 'M', '朡'), + (0x2F8DB, 'M', '杞'), + (0x2F8DC, 'M', '杓'), + (0x2F8DD, 'M', '𣏃'), + (0x2F8DE, 'M', '㭉'), + (0x2F8DF, 'M', '柺'), + (0x2F8E0, 'M', '枅'), + (0x2F8E1, 'M', '桒'), + (0x2F8E2, 'M', '梅'), + (0x2F8E3, 'M', '𣑭'), + (0x2F8E4, 'M', '梎'), + (0x2F8E5, 'M', '栟'), + (0x2F8E6, 'M', '椔'), + (0x2F8E7, 'M', '㮝'), + (0x2F8E8, 'M', '楂'), + (0x2F8E9, 'M', '榣'), + (0x2F8EA, 'M', '槪'), + (0x2F8EB, 'M', '檨'), + (0x2F8EC, 'M', '𣚣'), + (0x2F8ED, 'M', '櫛'), + (0x2F8EE, 'M', '㰘'), + (0x2F8EF, 'M', '次'), + (0x2F8F0, 'M', '𣢧'), + (0x2F8F1, 'M', '歔'), + (0x2F8F2, 'M', '㱎'), + (0x2F8F3, 'M', '歲'), + (0x2F8F4, 'M', '殟'), + (0x2F8F5, 'M', '殺'), + (0x2F8F6, 'M', '殻'), + (0x2F8F7, 'M', '𣪍'), + ] + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F8F8, 'M', '𡴋'), + (0x2F8F9, 'M', '𣫺'), + (0x2F8FA, 'M', '汎'), + (0x2F8FB, 'M', '𣲼'), + (0x2F8FC, 'M', '沿'), + (0x2F8FD, 'M', '泍'), + (0x2F8FE, 'M', '汧'), + (0x2F8FF, 'M', '洖'), + (0x2F900, 'M', '派'), + (0x2F901, 'M', '海'), + (0x2F902, 'M', '流'), + (0x2F903, 'M', '浩'), + (0x2F904, 'M', '浸'), + (0x2F905, 'M', '涅'), + (0x2F906, 'M', '𣴞'), + (0x2F907, 'M', '洴'), + (0x2F908, 'M', '港'), + (0x2F909, 'M', '湮'), + (0x2F90A, 'M', '㴳'), + (0x2F90B, 'M', '滋'), + (0x2F90C, 'M', '滇'), + (0x2F90D, 'M', '𣻑'), + (0x2F90E, 'M', '淹'), + (0x2F90F, 'M', '潮'), + (0x2F910, 'M', '𣽞'), + (0x2F911, 'M', '𣾎'), + (0x2F912, 'M', '濆'), + (0x2F913, 'M', '瀹'), + (0x2F914, 'M', '瀞'), + (0x2F915, 'M', '瀛'), + (0x2F916, 'M', '㶖'), + (0x2F917, 'M', '灊'), + (0x2F918, 'M', '災'), + (0x2F919, 'M', '灷'), + (0x2F91A, 'M', '炭'), + (0x2F91B, 'M', '𠔥'), + (0x2F91C, 'M', '煅'), + (0x2F91D, 'M', '𤉣'), + (0x2F91E, 'M', '熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', '爨'), + (0x2F921, 'M', '爵'), + (0x2F922, 'M', '牐'), + (0x2F923, 'M', '𤘈'), + (0x2F924, 'M', '犀'), + (0x2F925, 'M', '犕'), + (0x2F926, 'M', '𤜵'), + (0x2F927, 'M', '𤠔'), + 
(0x2F928, 'M', '獺'), + (0x2F929, 'M', '王'), + (0x2F92A, 'M', '㺬'), + (0x2F92B, 'M', '玥'), + (0x2F92C, 'M', '㺸'), + (0x2F92E, 'M', '瑇'), + (0x2F92F, 'M', '瑜'), + (0x2F930, 'M', '瑱'), + (0x2F931, 'M', '璅'), + (0x2F932, 'M', '瓊'), + (0x2F933, 'M', '㼛'), + (0x2F934, 'M', '甤'), + (0x2F935, 'M', '𤰶'), + (0x2F936, 'M', '甾'), + (0x2F937, 'M', '𤲒'), + (0x2F938, 'M', '異'), + (0x2F939, 'M', '𢆟'), + (0x2F93A, 'M', '瘐'), + (0x2F93B, 'M', '𤾡'), + (0x2F93C, 'M', '𤾸'), + (0x2F93D, 'M', '𥁄'), + (0x2F93E, 'M', '㿼'), + (0x2F93F, 'M', '䀈'), + (0x2F940, 'M', '直'), + (0x2F941, 'M', '𥃳'), + (0x2F942, 'M', '𥃲'), + (0x2F943, 'M', '𥄙'), + (0x2F944, 'M', '𥄳'), + (0x2F945, 'M', '眞'), + (0x2F946, 'M', '真'), + (0x2F948, 'M', '睊'), + (0x2F949, 'M', '䀹'), + (0x2F94A, 'M', '瞋'), + (0x2F94B, 'M', '䁆'), + (0x2F94C, 'M', '䂖'), + (0x2F94D, 'M', '𥐝'), + (0x2F94E, 'M', '硎'), + (0x2F94F, 'M', '碌'), + (0x2F950, 'M', '磌'), + (0x2F951, 'M', '䃣'), + (0x2F952, 'M', '𥘦'), + (0x2F953, 'M', '祖'), + (0x2F954, 'M', '𥚚'), + (0x2F955, 'M', '𥛅'), + (0x2F956, 'M', '福'), + (0x2F957, 'M', '秫'), + (0x2F958, 'M', '䄯'), + (0x2F959, 'M', '穀'), + (0x2F95A, 'M', '穊'), + (0x2F95B, 'M', '穏'), + (0x2F95C, 'M', '𥥼'), + (0x2F95D, 'M', '𥪧'), + ] + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F95F, 'X'), + (0x2F960, 'M', '䈂'), + (0x2F961, 'M', '𥮫'), + (0x2F962, 'M', '篆'), + (0x2F963, 'M', '築'), + (0x2F964, 'M', '䈧'), + (0x2F965, 'M', '𥲀'), + (0x2F966, 'M', '糒'), + (0x2F967, 'M', '䊠'), + (0x2F968, 'M', '糨'), + (0x2F969, 'M', '糣'), + (0x2F96A, 'M', '紀'), + (0x2F96B, 'M', '𥾆'), + (0x2F96C, 'M', '絣'), + (0x2F96D, 'M', '䌁'), + (0x2F96E, 'M', '緇'), + (0x2F96F, 'M', '縂'), + (0x2F970, 'M', '繅'), + (0x2F971, 'M', '䌴'), + (0x2F972, 'M', '𦈨'), + (0x2F973, 'M', '𦉇'), + (0x2F974, 'M', '䍙'), + (0x2F975, 'M', '𦋙'), + (0x2F976, 'M', '罺'), + (0x2F977, 'M', '𦌾'), + (0x2F978, 'M', '羕'), + (0x2F979, 'M', '翺'), + (0x2F97A, 'M', '者'), + (0x2F97B, 'M', '𦓚'), + (0x2F97C, 'M', '𦔣'), + (0x2F97D, 'M', '聠'), + (0x2F97E, 'M', '𦖨'), + (0x2F97F, 'M', '聰'), + (0x2F980, 'M', '𣍟'), + (0x2F981, 'M', '䏕'), + (0x2F982, 'M', '育'), + (0x2F983, 'M', '脃'), + (0x2F984, 'M', '䐋'), + (0x2F985, 'M', '脾'), + (0x2F986, 'M', '媵'), + (0x2F987, 'M', '𦞧'), + (0x2F988, 'M', '𦞵'), + (0x2F989, 'M', '𣎓'), + (0x2F98A, 'M', '𣎜'), + (0x2F98B, 'M', '舁'), + (0x2F98C, 'M', '舄'), + (0x2F98D, 'M', '辞'), + (0x2F98E, 'M', '䑫'), + (0x2F98F, 'M', '芑'), + (0x2F990, 'M', '芋'), + (0x2F991, 'M', '芝'), + (0x2F992, 'M', '劳'), + (0x2F993, 'M', '花'), + (0x2F994, 'M', '芳'), + (0x2F995, 'M', '芽'), + (0x2F996, 'M', '苦'), + (0x2F997, 'M', '𦬼'), + (0x2F998, 'M', '若'), + (0x2F999, 'M', '茝'), + (0x2F99A, 'M', '荣'), + (0x2F99B, 'M', '莭'), + (0x2F99C, 'M', '茣'), + (0x2F99D, 'M', '莽'), + (0x2F99E, 'M', '菧'), + (0x2F99F, 'M', '著'), + (0x2F9A0, 'M', '荓'), + (0x2F9A1, 'M', '菊'), + (0x2F9A2, 'M', '菌'), + (0x2F9A3, 'M', '菜'), + (0x2F9A4, 'M', '𦰶'), + (0x2F9A5, 'M', '𦵫'), + (0x2F9A6, 'M', '𦳕'), + (0x2F9A7, 'M', '䔫'), + (0x2F9A8, 'M', '蓱'), + (0x2F9A9, 'M', '蓳'), + (0x2F9AA, 'M', '蔖'), + (0x2F9AB, 'M', '𧏊'), + (0x2F9AC, 'M', '蕤'), + (0x2F9AD, 'M', '𦼬'), + (0x2F9AE, 'M', '䕝'), + (0x2F9AF, 'M', '䕡'), + (0x2F9B0, 'M', '𦾱'), + (0x2F9B1, 'M', '𧃒'), + (0x2F9B2, 'M', '䕫'), + (0x2F9B3, 'M', '虐'), + (0x2F9B4, 'M', '虜'), + (0x2F9B5, 'M', '虧'), + (0x2F9B6, 'M', '虩'), + (0x2F9B7, 'M', '蚩'), + (0x2F9B8, 'M', '蚈'), + (0x2F9B9, 'M', '蜎'), + (0x2F9BA, 'M', '蛢'), + (0x2F9BB, 'M', '蝹'), + (0x2F9BC, 'M', '蜨'), + (0x2F9BD, 'M', '蝫'), + (0x2F9BE, 'M', '螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', '蟡'), + (0x2F9C1, 'M', '蠁'), + 
(0x2F9C2, 'M', '䗹'), + ] + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F9C3, 'M', '衠'), + (0x2F9C4, 'M', '衣'), + (0x2F9C5, 'M', '𧙧'), + (0x2F9C6, 'M', '裗'), + (0x2F9C7, 'M', '裞'), + (0x2F9C8, 'M', '䘵'), + (0x2F9C9, 'M', '裺'), + (0x2F9CA, 'M', '㒻'), + (0x2F9CB, 'M', '𧢮'), + (0x2F9CC, 'M', '𧥦'), + (0x2F9CD, 'M', '䚾'), + (0x2F9CE, 'M', '䛇'), + (0x2F9CF, 'M', '誠'), + (0x2F9D0, 'M', '諭'), + (0x2F9D1, 'M', '變'), + (0x2F9D2, 'M', '豕'), + (0x2F9D3, 'M', '𧲨'), + (0x2F9D4, 'M', '貫'), + (0x2F9D5, 'M', '賁'), + (0x2F9D6, 'M', '贛'), + (0x2F9D7, 'M', '起'), + (0x2F9D8, 'M', '𧼯'), + (0x2F9D9, 'M', '𠠄'), + (0x2F9DA, 'M', '跋'), + (0x2F9DB, 'M', '趼'), + (0x2F9DC, 'M', '跰'), + (0x2F9DD, 'M', '𠣞'), + (0x2F9DE, 'M', '軔'), + (0x2F9DF, 'M', '輸'), + (0x2F9E0, 'M', '𨗒'), + (0x2F9E1, 'M', '𨗭'), + (0x2F9E2, 'M', '邔'), + (0x2F9E3, 'M', '郱'), + (0x2F9E4, 'M', '鄑'), + (0x2F9E5, 'M', '𨜮'), + (0x2F9E6, 'M', '鄛'), + (0x2F9E7, 'M', '鈸'), + (0x2F9E8, 'M', '鋗'), + (0x2F9E9, 'M', '鋘'), + (0x2F9EA, 'M', '鉼'), + (0x2F9EB, 'M', '鏹'), + (0x2F9EC, 'M', '鐕'), + (0x2F9ED, 'M', '𨯺'), + (0x2F9EE, 'M', '開'), + (0x2F9EF, 'M', '䦕'), + (0x2F9F0, 'M', '閷'), + (0x2F9F1, 'M', '𨵷'), + (0x2F9F2, 'M', '䧦'), + (0x2F9F3, 'M', '雃'), + (0x2F9F4, 'M', '嶲'), + (0x2F9F5, 'M', '霣'), + (0x2F9F6, 'M', '𩅅'), + (0x2F9F7, 'M', '𩈚'), + (0x2F9F8, 'M', '䩮'), + (0x2F9F9, 'M', '䩶'), + (0x2F9FA, 'M', '韠'), + (0x2F9FB, 'M', '𩐊'), + (0x2F9FC, 'M', '䪲'), + (0x2F9FD, 'M', '𩒖'), + (0x2F9FE, 'M', '頋'), + (0x2FA00, 'M', '頩'), + (0x2FA01, 'M', '𩖶'), + (0x2FA02, 'M', '飢'), + (0x2FA03, 'M', '䬳'), + (0x2FA04, 'M', '餩'), + (0x2FA05, 'M', '馧'), + (0x2FA06, 'M', '駂'), + (0x2FA07, 'M', '駾'), + (0x2FA08, 'M', '䯎'), + (0x2FA09, 'M', '𩬰'), + (0x2FA0A, 'M', '鬒'), + (0x2FA0B, 'M', '鱀'), + (0x2FA0C, 'M', '鳽'), + (0x2FA0D, 'M', '䳎'), + (0x2FA0E, 'M', '䳭'), + (0x2FA0F, 'M', '鵧'), + (0x2FA10, 'M', '𪃎'), + (0x2FA11, 'M', '䳸'), + (0x2FA12, 'M', '𪄅'), + (0x2FA13, 'M', '𪈎'), + (0x2FA14, 'M', '𪊑'), + (0x2FA15, 'M', '麻'), + (0x2FA16, 'M', '䵖'), + (0x2FA17, 'M', '黹'), + (0x2FA18, 'M', '黾'), + (0x2FA19, 'M', '鼅'), + (0x2FA1A, 'M', '鼏'), + (0x2FA1B, 'M', '鼖'), + (0x2FA1C, 'M', '鼻'), + (0x2FA1D, 'M', '𪘀'), + (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), + ] + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() + + _seg_80() +) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] 
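Each tuple in the table assembled above encodes the start of a code-point range plus its UTS46 status ('V' valid, 'M' mapped, 'I' ignored, 'X' disallowed, '3' disallowed under STD3 rules but otherwise mapped), with mapped entries carrying their replacement string; a code point is resolved by bisecting for the last entry at or below it. The sketch below illustrates that lookup under those assumptions; lookup and remap_char are hypothetical helper names, not idna's actual API, and the status handling is deliberately simplified.

import bisect
from typing import Optional, Tuple, Union

Row = Union[Tuple[int, str], Tuple[int, str, str]]

def lookup(table: Tuple[Row, ...], code_point: int) -> Row:
    # 'Z' sorts after every status letter used in the table, so bisect_left
    # lands just past the last entry whose start <= code_point; the entry
    # before that governs the whole range containing code_point.
    return table[bisect.bisect_left(table, (code_point, 'Z')) - 1]

def remap_char(table: Tuple[Row, ...], ch: str) -> Optional[str]:
    # Sketch only: real UTS46 processing also applies STD3 and
    # transitional rules, which are ignored here.
    row = lookup(table, ord(ch))
    status = row[1]
    if status == 'V':   # valid: kept unchanged
        return ch
    if status == 'M':   # mapped: replaced by its compatibility mapping
        return row[2]
    if status == 'I':   # ignored: removed from the label
        return ''
    return None         # 'X', '3', and the rest: treated as disallowed here

# Example against the table above: U+1D5BA MATHEMATICAL SANS-SERIF SMALL A
#   remap_char(uts46data, '\U0001D5BA') -> 'a'

Storing only range starts keeps the tuple sorted on its first element, which is exactly what makes the bisect trick valid and keeps the table far smaller than one entry per code point.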
diff --git a/venv/lib/python3.10/site-packages/jose/__init__.py b/venv/lib/python3.10/site-packages/jose/__init__.py new file mode 100644 index 0000000..054baa7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/jose/__init__.py @@ -0,0 +1,10 @@ +__version__ = "3.3.0" +__author__ = "Michael Davis" +__license__ = "MIT" +__copyright__ = "Copyright 2016 Michael Davis" + + +from .exceptions import ExpiredSignatureError # noqa: F401 +from .exceptions import JOSEError # noqa: F401 +from .exceptions import JWSError # noqa: F401 +from .exceptions import JWTError # noqa: F401 diff --git a/venv/lib/python3.10/site-packages/jose/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..2d03020 Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/__pycache__/constants.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/__pycache__/constants.cpython-310.pyc new file mode 100644 index 0000000..7d024fa Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/__pycache__/constants.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/__pycache__/exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000..f3c2627 Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/__pycache__/exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/__pycache__/jwe.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/__pycache__/jwe.cpython-310.pyc new file mode 100644 index 0000000..3645825 Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/__pycache__/jwe.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/__pycache__/jwk.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/__pycache__/jwk.cpython-310.pyc new file mode 100644 index 0000000..1a7f4cf Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/__pycache__/jwk.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/__pycache__/jws.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/__pycache__/jws.cpython-310.pyc new file mode 100644 index 0000000..d32a690 Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/__pycache__/jws.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/__pycache__/jwt.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/__pycache__/jwt.cpython-310.pyc new file mode 100644 index 0000000..523c6a9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/__pycache__/jwt.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000..10973d7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/backends/__init__.py b/venv/lib/python3.10/site-packages/jose/backends/__init__.py new file mode 100644 index 0000000..e7bba69 --- /dev/null +++ b/venv/lib/python3.10/site-packages/jose/backends/__init__.py @@ -0,0 +1,32 @@ +try: + from jose.backends.cryptography_backend import get_random_bytes # noqa: F401 +except ImportError: + try: + from jose.backends.pycrypto_backend import get_random_bytes # 
noqa: F401 + except ImportError: + from jose.backends.native import get_random_bytes # noqa: F401 + +try: + from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey # noqa: F401 +except ImportError: + try: + from jose.backends.rsa_backend import RSAKey # noqa: F401 + except ImportError: + RSAKey = None + +try: + from jose.backends.cryptography_backend import CryptographyECKey as ECKey # noqa: F401 +except ImportError: + from jose.backends.ecdsa_backend import ECDSAECKey as ECKey # noqa: F401 + +try: + from jose.backends.cryptography_backend import CryptographyAESKey as AESKey # noqa: F401 +except ImportError: + AESKey = None + +try: + from jose.backends.cryptography_backend import CryptographyHMACKey as HMACKey # noqa: F401 +except ImportError: + from jose.backends.native import HMACKey # noqa: F401 + +from .base import DIRKey # noqa: F401 diff --git a/venv/lib/python3.10/site-packages/jose/backends/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..214b24c Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/backends/__pycache__/_asn1.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/_asn1.cpython-310.pyc new file mode 100644 index 0000000..5211a3e Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/_asn1.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/backends/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000..027eda9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/base.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/backends/__pycache__/cryptography_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/cryptography_backend.cpython-310.pyc new file mode 100644 index 0000000..df9c654 Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/cryptography_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/backends/__pycache__/ecdsa_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/ecdsa_backend.cpython-310.pyc new file mode 100644 index 0000000..7b76a6d Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/ecdsa_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/backends/__pycache__/native.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/native.cpython-310.pyc new file mode 100644 index 0000000..dcaec75 Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/native.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/backends/__pycache__/rsa_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/rsa_backend.cpython-310.pyc new file mode 100644 index 0000000..fef959a Binary files /dev/null and b/venv/lib/python3.10/site-packages/jose/backends/__pycache__/rsa_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/jose/backends/_asn1.py b/venv/lib/python3.10/site-packages/jose/backends/_asn1.py new file mode 100644 index 0000000..af5fa8b --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/jose/backends/_asn1.py @@ -0,0 +1,83 @@ +"""ASN1 encoding helpers for converting between PKCS1 and PKCS8. + +Required by rsa_backend but not cryptography_backend. +""" +from pyasn1.codec.der import decoder, encoder +from pyasn1.type import namedtype, univ + +RSA_ENCRYPTION_ASN1_OID = "1.2.840.113549.1.1.1" + + +class RsaAlgorithmIdentifier(univ.Sequence): + """ASN1 structure for recording RSA PrivateKeyAlgorithm identifiers.""" + + componentType = namedtype.NamedTypes( + namedtype.NamedType("rsaEncryption", univ.ObjectIdentifier()), namedtype.NamedType("parameters", univ.Null()) + ) + + +class PKCS8PrivateKey(univ.Sequence): + """ASN1 structure for recording PKCS8 private keys.""" + + componentType = namedtype.NamedTypes( + namedtype.NamedType("version", univ.Integer()), + namedtype.NamedType("privateKeyAlgorithm", RsaAlgorithmIdentifier()), + namedtype.NamedType("privateKey", univ.OctetString()), + ) + + +class PublicKeyInfo(univ.Sequence): + """ASN1 structure for recording PKCS8 public keys.""" + + componentType = namedtype.NamedTypes( + namedtype.NamedType("algorithm", RsaAlgorithmIdentifier()), namedtype.NamedType("publicKey", univ.BitString()) + ) + + +def rsa_private_key_pkcs8_to_pkcs1(pkcs8_key): + """Convert a PKCS8-encoded RSA private key to PKCS1.""" + decoded_values = decoder.decode(pkcs8_key, asn1Spec=PKCS8PrivateKey()) + + try: + decoded_key = decoded_values[0] + except IndexError: + raise ValueError("Invalid private key encoding") + + return decoded_key["privateKey"] + + +def rsa_private_key_pkcs1_to_pkcs8(pkcs1_key): + """Convert a PKCS1-encoded RSA private key to PKCS8.""" + algorithm = RsaAlgorithmIdentifier() + algorithm["rsaEncryption"] = RSA_ENCRYPTION_ASN1_OID + + pkcs8_key = PKCS8PrivateKey() + pkcs8_key["version"] = 0 + pkcs8_key["privateKeyAlgorithm"] = algorithm + pkcs8_key["privateKey"] = pkcs1_key + + return encoder.encode(pkcs8_key) + + +def rsa_public_key_pkcs1_to_pkcs8(pkcs1_key): + """Convert a PKCS1-encoded RSA public key to PKCS8.""" + algorithm = RsaAlgorithmIdentifier() + algorithm["rsaEncryption"] = RSA_ENCRYPTION_ASN1_OID + + pkcs8_key = PublicKeyInfo() + pkcs8_key["algorithm"] = algorithm + pkcs8_key["publicKey"] = univ.BitString.fromOctetString(pkcs1_key) + + return encoder.encode(pkcs8_key) + + +def rsa_public_key_pkcs8_to_pkcs1(pkcs8_key): + """Convert a PKCS8-encoded RSA public key to PKCS1.""" + decoded_values = decoder.decode(pkcs8_key, asn1Spec=PublicKeyInfo()) + + try: + decoded_key = decoded_values[0] + except IndexError: + raise ValueError("Invalid public key encoding.") + + return decoded_key["publicKey"].asOctets() diff --git a/venv/lib/python3.10/site-packages/jose/backends/base.py b/venv/lib/python3.10/site-packages/jose/backends/base.py new file mode 100644 index 0000000..b000a52 --- /dev/null +++ b/venv/lib/python3.10/site-packages/jose/backends/base.py @@ -0,0 +1,89 @@ +from ..utils import base64url_encode, ensure_binary + + +class Key: + """ + A simple interface for implementing JWK keys.
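+ Concrete backends subclass it and override the operations their key type supports; the defaults raise NotImplementedError.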
+ """ + + def __init__(self, key, algorithm): + pass + + def sign(self, msg): + raise NotImplementedError() + + def verify(self, msg, sig): + raise NotImplementedError() + + def public_key(self): + raise NotImplementedError() + + def to_pem(self): + raise NotImplementedError() + + def to_dict(self): + raise NotImplementedError() + + def encrypt(self, plain_text, aad=None): + """ + Encrypt the plain text and generate an auth tag if appropriate + + Args: + plain_text (bytes): Data to encrypt + aad (bytes, optional): Authenticated Additional Data if key's algorithm supports auth mode + + Returns: + (bytes, bytes, bytes): IV, cipher text, and auth tag + """ + raise NotImplementedError() + + def decrypt(self, cipher_text, iv=None, aad=None, tag=None): + """ + Decrypt the cipher text and validate the auth tag if present + Args: + cipher_text (bytes): Cipher text to decrypt + iv (bytes): IV if block mode + aad (bytes): Additional Authenticated Data to verify if auth mode + tag (bytes): Authentication tag if auth mode + + Returns: + bytes: Decrypted value + """ + raise NotImplementedError() + + def wrap_key(self, key_data): + """ + Wrap the the plain text key data + + Args: + key_data (bytes): Key data to wrap + + Returns: + bytes: Wrapped key + """ + raise NotImplementedError() + + def unwrap_key(self, wrapped_key): + """ + Unwrap the the wrapped key data + + Args: + wrapped_key (bytes): Wrapped key data to unwrap + + Returns: + bytes: Unwrapped key + """ + raise NotImplementedError() + + +class DIRKey(Key): + def __init__(self, key_data, algorithm): + self._key = ensure_binary(key_data) + self._alg = algorithm + + def to_dict(self): + return { + "alg": self._alg, + "kty": "oct", + "k": base64url_encode(self._key), + } diff --git a/venv/lib/python3.10/site-packages/jose/backends/cryptography_backend.py b/venv/lib/python3.10/site-packages/jose/backends/cryptography_backend.py new file mode 100644 index 0000000..abd2426 --- /dev/null +++ b/venv/lib/python3.10/site-packages/jose/backends/cryptography_backend.py @@ -0,0 +1,605 @@ +import math +import warnings + +from cryptography.exceptions import InvalidSignature, InvalidTag +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.bindings.openssl.binding import Binding +from cryptography.hazmat.primitives import hashes, hmac, serialization +from cryptography.hazmat.primitives.asymmetric import ec, padding, rsa +from cryptography.hazmat.primitives.asymmetric.utils import decode_dss_signature, encode_dss_signature +from cryptography.hazmat.primitives.ciphers import Cipher, aead, algorithms, modes +from cryptography.hazmat.primitives.keywrap import InvalidUnwrap, aes_key_unwrap, aes_key_wrap +from cryptography.hazmat.primitives.padding import PKCS7 +from cryptography.hazmat.primitives.serialization import load_pem_private_key, load_pem_public_key +from cryptography.utils import int_to_bytes +from cryptography.x509 import load_pem_x509_certificate + +from ..constants import ALGORITHMS +from ..exceptions import JWEError, JWKError +from ..utils import base64_to_long, base64url_decode, base64url_encode, ensure_binary, long_to_base64 +from .base import Key + +_binding = None + + +def get_random_bytes(num_bytes): + """ + Get random bytes + + Currently, Cryptography returns OS random bytes. 
If you want OpenSSL + generated random bytes, you'll have to switch the RAND engine after + initializing the OpenSSL backend + Args: + num_bytes (int): Number of random bytes to generate and return + Returns: + bytes: Random bytes + """ + global _binding + + if _binding is None: + _binding = Binding() + + buf = _binding.ffi.new("char[]", num_bytes) + _binding.lib.RAND_bytes(buf, num_bytes) + rand_bytes = _binding.ffi.buffer(buf, num_bytes)[:] + return rand_bytes + + +class CryptographyECKey(Key): + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, key, algorithm, cryptography_backend=default_backend): + if algorithm not in ALGORITHMS.EC: + raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) + + self.hash_alg = { + ALGORITHMS.ES256: self.SHA256, + ALGORITHMS.ES384: self.SHA384, + ALGORITHMS.ES512: self.SHA512, + }.get(algorithm) + self._algorithm = algorithm + + self.cryptography_backend = cryptography_backend + + if hasattr(key, "public_bytes") or hasattr(key, "private_bytes"): + self.prepared_key = key + return + + if hasattr(key, "to_pem"): + # convert to PEM and let cryptography below load it as PEM + key = key.to_pem().decode("utf-8") + + if isinstance(key, dict): + self.prepared_key = self._process_jwk(key) + return + + if isinstance(key, str): + key = key.encode("utf-8") + + if isinstance(key, bytes): + # Attempt to load key. We don't know if it's + # a Public Key or a Private Key, so we try + # the Public Key first. + try: + try: + key = load_pem_public_key(key, self.cryptography_backend()) + except ValueError: + key = load_pem_private_key(key, password=None, backend=self.cryptography_backend()) + except Exception as e: + raise JWKError(e) + + self.prepared_key = key + return + + raise JWKError("Unable to parse an ECKey from key: %s" % key) + + def _process_jwk(self, jwk_dict): + if not jwk_dict.get("kty") == "EC": + raise JWKError("Incorrect key type. Expected: 'EC', Received: %s" % jwk_dict.get("kty")) + + if not all(k in jwk_dict for k in ["x", "y", "crv"]): + raise JWKError("Mandatory parameters are missing") + + x = base64_to_long(jwk_dict.get("x")) + y = base64_to_long(jwk_dict.get("y")) + curve = { + "P-256": ec.SECP256R1, + "P-384": ec.SECP384R1, + "P-521": ec.SECP521R1, + }[jwk_dict["crv"]] + + public = ec.EllipticCurvePublicNumbers(x, y, curve()) + + if "d" in jwk_dict: + d = base64_to_long(jwk_dict.get("d")) + private = ec.EllipticCurvePrivateNumbers(d, public) + + return private.private_key(self.cryptography_backend()) + else: + return public.public_key(self.cryptography_backend()) + + def _sig_component_length(self): + """Determine the correct serialization length for an encoded signature component. + + This is the number of bytes required to encode the maximum key value. 
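+ For a P-256 key this is ceil(256 / 8) = 32 octets per component; for P-521 it is ceil(521 / 8) = 66.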
+ """ + return int(math.ceil(self.prepared_key.key_size / 8.0)) + + def _der_to_raw(self, der_signature): + """Convert signature from DER encoding to RAW encoding.""" + r, s = decode_dss_signature(der_signature) + component_length = self._sig_component_length() + return int_to_bytes(r, component_length) + int_to_bytes(s, component_length) + + def _raw_to_der(self, raw_signature): + """Convert signature from RAW encoding to DER encoding.""" + component_length = self._sig_component_length() + if len(raw_signature) != int(2 * component_length): + raise ValueError("Invalid signature") + + r_bytes = raw_signature[:component_length] + s_bytes = raw_signature[component_length:] + r = int.from_bytes(r_bytes, "big") + s = int.from_bytes(s_bytes, "big") + return encode_dss_signature(r, s) + + def sign(self, msg): + if self.hash_alg.digest_size * 8 > self.prepared_key.curve.key_size: + raise TypeError( + "this curve (%s) is too short " + "for your digest (%d)" % (self.prepared_key.curve.name, 8 * self.hash_alg.digest_size) + ) + signature = self.prepared_key.sign(msg, ec.ECDSA(self.hash_alg())) + return self._der_to_raw(signature) + + def verify(self, msg, sig): + try: + signature = self._raw_to_der(sig) + self.prepared_key.verify(signature, msg, ec.ECDSA(self.hash_alg())) + return True + except Exception: + return False + + def is_public(self): + return hasattr(self.prepared_key, "public_bytes") + + def public_key(self): + if self.is_public(): + return self + return self.__class__(self.prepared_key.public_key(), self._algorithm) + + def to_pem(self): + if self.is_public(): + pem = self.prepared_key.public_bytes( + encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + return pem + pem = self.prepared_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + return pem + + def to_dict(self): + if not self.is_public(): + public_key = self.prepared_key.public_key() + else: + public_key = self.prepared_key + + crv = { + "secp256r1": "P-256", + "secp384r1": "P-384", + "secp521r1": "P-521", + }[self.prepared_key.curve.name] + + # Calculate the key size in bytes. Section 6.2.1.2 and 6.2.1.3 of + # RFC7518 prescribes that the 'x', 'y' and 'd' parameters of the curve + # points must be encoded as octed-strings of this length. 
+ key_size = (self.prepared_key.curve.key_size + 7) // 8 + + data = { + "alg": self._algorithm, + "kty": "EC", + "crv": crv, + "x": long_to_base64(public_key.public_numbers().x, size=key_size).decode("ASCII"), + "y": long_to_base64(public_key.public_numbers().y, size=key_size).decode("ASCII"), + } + + if not self.is_public(): + private_value = self.prepared_key.private_numbers().private_value + data["d"] = long_to_base64(private_value, size=key_size).decode("ASCII") + + return data + + +class CryptographyRSAKey(Key): + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + RSA1_5 = padding.PKCS1v15() + RSA_OAEP = padding.OAEP(padding.MGF1(hashes.SHA1()), hashes.SHA1(), None) + RSA_OAEP_256 = padding.OAEP(padding.MGF1(hashes.SHA256()), hashes.SHA256(), None) + + def __init__(self, key, algorithm, cryptography_backend=default_backend): + if algorithm not in ALGORITHMS.RSA: + raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) + + self.hash_alg = { + ALGORITHMS.RS256: self.SHA256, + ALGORITHMS.RS384: self.SHA384, + ALGORITHMS.RS512: self.SHA512, + }.get(algorithm) + self._algorithm = algorithm + + self.padding = { + ALGORITHMS.RSA1_5: self.RSA1_5, + ALGORITHMS.RSA_OAEP: self.RSA_OAEP, + ALGORITHMS.RSA_OAEP_256: self.RSA_OAEP_256, + }.get(algorithm) + + self.cryptography_backend = cryptography_backend + + # if it conforms to RSAPublicKey interface + if hasattr(key, "public_bytes") and hasattr(key, "public_numbers"): + self.prepared_key = key + return + + if isinstance(key, dict): + self.prepared_key = self._process_jwk(key) + return + + if isinstance(key, str): + key = key.encode("utf-8") + + if isinstance(key, bytes): + try: + if key.startswith(b"-----BEGIN CERTIFICATE-----"): + self._process_cert(key) + return + + try: + self.prepared_key = load_pem_public_key(key, self.cryptography_backend()) + except ValueError: + self.prepared_key = load_pem_private_key(key, password=None, backend=self.cryptography_backend()) + except Exception as e: + raise JWKError(e) + return + + raise JWKError("Unable to parse an RSA_JWK from key: %s" % key) + + def _process_jwk(self, jwk_dict): + if not jwk_dict.get("kty") == "RSA": + raise JWKError("Incorrect key type. Expected: 'RSA', Received: %s" % jwk_dict.get("kty")) + + e = base64_to_long(jwk_dict.get("e", 256)) + n = base64_to_long(jwk_dict.get("n")) + public = rsa.RSAPublicNumbers(e, n) + + if "d" not in jwk_dict: + return public.public_key(self.cryptography_backend()) + else: + # This is a private key. + d = base64_to_long(jwk_dict.get("d")) + + extra_params = ["p", "q", "dp", "dq", "qi"] + + if any(k in jwk_dict for k in extra_params): + # Precomputed private key parameters are available. + if not all(k in jwk_dict for k in extra_params): + # These values must be present when 'p' is according to + # Section 6.3.2 of RFC7518, so if they are not we raise + # an error. + raise JWKError("Precomputed private key parameters are incomplete.") + + p = base64_to_long(jwk_dict["p"]) + q = base64_to_long(jwk_dict["q"]) + dp = base64_to_long(jwk_dict["dp"]) + dq = base64_to_long(jwk_dict["dq"]) + qi = base64_to_long(jwk_dict["qi"]) + else: + # The precomputed private key parameters are not available, + # so we use cryptography's API to fill them in. 
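
The prime-factor recovery used in the branch above can be exercised on its own with the cryptography package; a minimal sketch (illustrative values, 2048-bit key):

from cryptography.hazmat.primitives.asymmetric import rsa

priv = rsa.generate_private_key(public_exponent=65537, key_size=2048)
pub = priv.public_key().public_numbers()
d = priv.private_numbers().d
# Recover p and q from (n, e, d) only, as required by RFC 7518 Section 6.3.1.
p, q = rsa.rsa_recover_prime_factors(pub.n, pub.e, d)
assert p * q == pub.n
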
+ p, q = rsa.rsa_recover_prime_factors(n, e, d) + dp = rsa.rsa_crt_dmp1(d, p) + dq = rsa.rsa_crt_dmq1(d, q) + qi = rsa.rsa_crt_iqmp(p, q) + + private = rsa.RSAPrivateNumbers(p, q, d, dp, dq, qi, public) + + return private.private_key(self.cryptography_backend()) + + def _process_cert(self, key): + key = load_pem_x509_certificate(key, self.cryptography_backend()) + self.prepared_key = key.public_key() + + def sign(self, msg): + try: + signature = self.prepared_key.sign(msg, padding.PKCS1v15(), self.hash_alg()) + except Exception as e: + raise JWKError(e) + return signature + + def verify(self, msg, sig): + if not self.is_public(): + warnings.warn("Attempting to verify a message with a private key. " "This is not recommended.") + + try: + self.public_key().prepared_key.verify(sig, msg, padding.PKCS1v15(), self.hash_alg()) + return True + except InvalidSignature: + return False + + def is_public(self): + return hasattr(self.prepared_key, "public_bytes") + + def public_key(self): + if self.is_public(): + return self + return self.__class__(self.prepared_key.public_key(), self._algorithm) + + def to_pem(self, pem_format="PKCS8"): + if self.is_public(): + if pem_format == "PKCS8": + fmt = serialization.PublicFormat.SubjectPublicKeyInfo + elif pem_format == "PKCS1": + fmt = serialization.PublicFormat.PKCS1 + else: + raise ValueError("Invalid format specified: %r" % pem_format) + pem = self.prepared_key.public_bytes(encoding=serialization.Encoding.PEM, format=fmt) + return pem + + if pem_format == "PKCS8": + fmt = serialization.PrivateFormat.PKCS8 + elif pem_format == "PKCS1": + fmt = serialization.PrivateFormat.TraditionalOpenSSL + else: + raise ValueError("Invalid format specified: %r" % pem_format) + + return self.prepared_key.private_bytes( + encoding=serialization.Encoding.PEM, format=fmt, encryption_algorithm=serialization.NoEncryption() + ) + + def to_dict(self): + if not self.is_public(): + public_key = self.prepared_key.public_key() + else: + public_key = self.prepared_key + + data = { + "alg": self._algorithm, + "kty": "RSA", + "n": long_to_base64(public_key.public_numbers().n).decode("ASCII"), + "e": long_to_base64(public_key.public_numbers().e).decode("ASCII"), + } + + if not self.is_public(): + data.update( + { + "d": long_to_base64(self.prepared_key.private_numbers().d).decode("ASCII"), + "p": long_to_base64(self.prepared_key.private_numbers().p).decode("ASCII"), + "q": long_to_base64(self.prepared_key.private_numbers().q).decode("ASCII"), + "dp": long_to_base64(self.prepared_key.private_numbers().dmp1).decode("ASCII"), + "dq": long_to_base64(self.prepared_key.private_numbers().dmq1).decode("ASCII"), + "qi": long_to_base64(self.prepared_key.private_numbers().iqmp).decode("ASCII"), + } + ) + + return data + + def wrap_key(self, key_data): + try: + wrapped_key = self.prepared_key.encrypt(key_data, self.padding) + except Exception as e: + raise JWEError(e) + + return wrapped_key + + def unwrap_key(self, wrapped_key): + try: + unwrapped_key = self.prepared_key.decrypt(wrapped_key, self.padding) + return unwrapped_key + except Exception as e: + raise JWEError(e) + + +class CryptographyAESKey(Key): + KEY_128 = (ALGORITHMS.A128GCM, ALGORITHMS.A128GCMKW, ALGORITHMS.A128KW, ALGORITHMS.A128CBC) + KEY_192 = (ALGORITHMS.A192GCM, ALGORITHMS.A192GCMKW, ALGORITHMS.A192KW, ALGORITHMS.A192CBC) + KEY_256 = ( + ALGORITHMS.A256GCM, + ALGORITHMS.A256GCMKW, + ALGORITHMS.A256KW, + ALGORITHMS.A128CBC_HS256, + ALGORITHMS.A256CBC, + ) + KEY_384 = (ALGORITHMS.A192CBC_HS384,) + KEY_512 = 
(ALGORITHMS.A256CBC_HS512,) + + AES_KW_ALGS = (ALGORITHMS.A128KW, ALGORITHMS.A192KW, ALGORITHMS.A256KW) + + MODES = { + ALGORITHMS.A128GCM: modes.GCM, + ALGORITHMS.A192GCM: modes.GCM, + ALGORITHMS.A256GCM: modes.GCM, + ALGORITHMS.A128CBC_HS256: modes.CBC, + ALGORITHMS.A192CBC_HS384: modes.CBC, + ALGORITHMS.A256CBC_HS512: modes.CBC, + ALGORITHMS.A128CBC: modes.CBC, + ALGORITHMS.A192CBC: modes.CBC, + ALGORITHMS.A256CBC: modes.CBC, + ALGORITHMS.A128GCMKW: modes.GCM, + ALGORITHMS.A192GCMKW: modes.GCM, + ALGORITHMS.A256GCMKW: modes.GCM, + ALGORITHMS.A128KW: None, + ALGORITHMS.A192KW: None, + ALGORITHMS.A256KW: None, + } + + def __init__(self, key, algorithm): + if algorithm not in ALGORITHMS.AES: + raise JWKError("%s is not a valid AES algorithm" % algorithm) + if algorithm not in ALGORITHMS.SUPPORTED.union(ALGORITHMS.AES_PSEUDO): + raise JWKError("%s is not a supported algorithm" % algorithm) + + self._algorithm = algorithm + self._mode = self.MODES.get(self._algorithm) + + if algorithm in self.KEY_128 and len(key) != 16: + raise JWKError(f"Key must be 128 bit for alg {algorithm}") + elif algorithm in self.KEY_192 and len(key) != 24: + raise JWKError(f"Key must be 192 bit for alg {algorithm}") + elif algorithm in self.KEY_256 and len(key) != 32: + raise JWKError(f"Key must be 256 bit for alg {algorithm}") + elif algorithm in self.KEY_384 and len(key) != 48: + raise JWKError(f"Key must be 384 bit for alg {algorithm}") + elif algorithm in self.KEY_512 and len(key) != 64: + raise JWKError(f"Key must be 512 bit for alg {algorithm}") + + self._key = key + + def to_dict(self): + data = {"alg": self._algorithm, "kty": "oct", "k": base64url_encode(self._key)} + return data + + def encrypt(self, plain_text, aad=None): + plain_text = ensure_binary(plain_text) + try: + iv = get_random_bytes(algorithms.AES.block_size // 8) + mode = self._mode(iv) + if mode.name == "GCM": + cipher = aead.AESGCM(self._key) + cipher_text_and_tag = cipher.encrypt(iv, plain_text, aad) + cipher_text = cipher_text_and_tag[: len(cipher_text_and_tag) - 16] + auth_tag = cipher_text_and_tag[-16:] + else: + cipher = Cipher(algorithms.AES(self._key), mode, backend=default_backend()) + encryptor = cipher.encryptor() + padder = PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(plain_text) + padded_data += padder.finalize() + cipher_text = encryptor.update(padded_data) + encryptor.finalize() + auth_tag = None + return iv, cipher_text, auth_tag + except Exception as e: + raise JWEError(e) + + def decrypt(self, cipher_text, iv=None, aad=None, tag=None): + cipher_text = ensure_binary(cipher_text) + try: + iv = ensure_binary(iv) + mode = self._mode(iv) + if mode.name == "GCM": + if tag is None: + raise ValueError("tag cannot be None") + cipher = aead.AESGCM(self._key) + cipher_text_and_tag = cipher_text + tag + try: + plain_text = cipher.decrypt(iv, cipher_text_and_tag, aad) + except InvalidTag: + raise JWEError("Invalid JWE Auth Tag") + else: + cipher = Cipher(algorithms.AES(self._key), mode, backend=default_backend()) + decryptor = cipher.decryptor() + padded_plain_text = decryptor.update(cipher_text) + padded_plain_text += decryptor.finalize() + unpadder = PKCS7(algorithms.AES.block_size).unpadder() + plain_text = unpadder.update(padded_plain_text) + plain_text += unpadder.finalize() + + return plain_text + except Exception as e: + raise JWEError(e) + + def wrap_key(self, key_data): + key_data = ensure_binary(key_data) + cipher_text = aes_key_wrap(self._key, key_data, default_backend()) + return cipher_text # IV, 
cipher text and auth tag are not returned here; AES key wrap yields only the wrapped key
+
+    def unwrap_key(self, wrapped_key):
+        wrapped_key = ensure_binary(wrapped_key)
+        try:
+            plain_text = aes_key_unwrap(self._key, wrapped_key, default_backend())
+        except InvalidUnwrap as cause:
+            raise JWEError(cause)
+        return plain_text
+
+
+class CryptographyHMACKey(Key):
+    """
+    Performs signing and verification operations using HMAC
+    and the specified hash function.
+    """
+
+    ALG_MAP = {ALGORITHMS.HS256: hashes.SHA256(), ALGORITHMS.HS384: hashes.SHA384(), ALGORITHMS.HS512: hashes.SHA512()}
+
+    def __init__(self, key, algorithm):
+        if algorithm not in ALGORITHMS.HMAC:
+            raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm)
+        self._algorithm = algorithm
+        self._hash_alg = self.ALG_MAP.get(algorithm)
+
+        if isinstance(key, dict):
+            self.prepared_key = self._process_jwk(key)
+            return
+
+        if not isinstance(key, str) and not isinstance(key, bytes):
+            raise JWKError("Expecting a string- or bytes-formatted key.")
+
+        if isinstance(key, str):
+            key = key.encode("utf-8")
+
+        invalid_strings = [
+            b"-----BEGIN PUBLIC KEY-----",
+            b"-----BEGIN RSA PUBLIC KEY-----",
+            b"-----BEGIN CERTIFICATE-----",
+            b"ssh-rsa",
+        ]
+
+        if any(string_value in key for string_value in invalid_strings):
+            raise JWKError(
+                "The specified key is an asymmetric key or x509 certificate and"
+                " should not be used as an HMAC secret."
+            )
+
+        self.prepared_key = key
+
+    def _process_jwk(self, jwk_dict):
+        if not jwk_dict.get("kty") == "oct":
+            raise JWKError("Incorrect key type. Expected: 'oct', Received: %s" % jwk_dict.get("kty"))
+
+        k = jwk_dict.get("k")
+        k = k.encode("utf-8")
+        k = bytes(k)
+        k = base64url_decode(k)
+
+        return k
+
+    def to_dict(self):
+        return {
+            "alg": self._algorithm,
+            "kty": "oct",
+            "k": base64url_encode(self.prepared_key).decode("ASCII"),
+        }
+
+    def sign(self, msg):
+        msg = ensure_binary(msg)
+        h = hmac.HMAC(self.prepared_key, self._hash_alg, backend=default_backend())
+        h.update(msg)
+        signature = h.finalize()
+        return signature
+
+    def verify(self, msg, sig):
+        msg = ensure_binary(msg)
+        sig = ensure_binary(sig)
+        h = hmac.HMAC(self.prepared_key, self._hash_alg, backend=default_backend())
+        h.update(msg)
+        try:
+            h.verify(sig)
+            verified = True
+        except InvalidSignature:
+            verified = False
+        return verified
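
A usage sketch for the HMAC key class above, with a hypothetical 32-byte placeholder secret (assumes the install path used throughout this backend):

from jose.backends.cryptography_backend import CryptographyHMACKey
from jose.constants import ALGORITHMS

key = CryptographyHMACKey(b"0" * 32, ALGORITHMS.HS256)
tag = key.sign(b"payload")
assert len(tag) == 32  # SHA-256 digest size
assert key.verify(b"payload", tag)
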
diff --git a/venv/lib/python3.10/site-packages/jose/backends/ecdsa_backend.py b/venv/lib/python3.10/site-packages/jose/backends/ecdsa_backend.py
new file mode 100644
index 0000000..756c7ea
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/jose/backends/ecdsa_backend.py
@@ -0,0 +1,150 @@
+import hashlib
+
+import ecdsa
+
+from jose.backends.base import Key
+from jose.constants import ALGORITHMS
+from jose.exceptions import JWKError
+from jose.utils import base64_to_long, long_to_base64
+
+
+class ECDSAECKey(Key):
+    """
+    Performs signing and verification operations using
+    ECDSA and the specified hash function
+
+    This class requires the ecdsa package to be installed.
+
+    This is based off of the implementation in PyJWT 0.3.2
+    """
+
+    SHA256 = hashlib.sha256
+    SHA384 = hashlib.sha384
+    SHA512 = hashlib.sha512
+
+    CURVE_MAP = {
+        SHA256: ecdsa.curves.NIST256p,
+        SHA384: ecdsa.curves.NIST384p,
+        SHA512: ecdsa.curves.NIST521p,
+    }
+    CURVE_NAMES = (
+        (ecdsa.curves.NIST256p, "P-256"),
+        (ecdsa.curves.NIST384p, "P-384"),
+        (ecdsa.curves.NIST521p, "P-521"),
+    )
+
+    def __init__(self, key, algorithm):
+        if algorithm not in ALGORITHMS.EC:
+            raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm)
+
+        self.hash_alg = {
+            ALGORITHMS.ES256: self.SHA256,
+            ALGORITHMS.ES384: self.SHA384,
+            ALGORITHMS.ES512: self.SHA512,
+        }.get(algorithm)
+        self._algorithm = algorithm
+
+        self.curve = self.CURVE_MAP.get(self.hash_alg)
+
+        if isinstance(key, (ecdsa.SigningKey, ecdsa.VerifyingKey)):
+            self.prepared_key = key
+            return
+
+        if isinstance(key, dict):
+            self.prepared_key = self._process_jwk(key)
+            return
+
+        if isinstance(key, str):
+            key = key.encode("utf-8")
+
+        if isinstance(key, bytes):
+            # Attempt to load key. We don't know if it's
+            # a Signing Key or a Verifying Key, so we try
+            # the Verifying Key first.
+            try:
+                key = ecdsa.VerifyingKey.from_pem(key)
+            except ecdsa.der.UnexpectedDER:
+                key = ecdsa.SigningKey.from_pem(key)
+            except Exception as e:
+                raise JWKError(e)
+
+            self.prepared_key = key
+            return
+
+        raise JWKError("Unable to parse an ECKey from key: %s" % key)
+
+    def _process_jwk(self, jwk_dict):
+        if not jwk_dict.get("kty") == "EC":
+            raise JWKError("Incorrect key type. Expected: 'EC', Received: %s" % jwk_dict.get("kty"))
+
+        if not all(k in jwk_dict for k in ["x", "y", "crv"]):
+            raise JWKError("Mandatory parameters are missing")
+
+        if "d" in jwk_dict:
+            # We are dealing with a private key; the secret exponent is enough
+            # to create an ecdsa key.
+            d = base64_to_long(jwk_dict.get("d"))
+            return ecdsa.keys.SigningKey.from_secret_exponent(d, self.curve)
+        else:
+            x = base64_to_long(jwk_dict.get("x"))
+            y = base64_to_long(jwk_dict.get("y"))
+
+            if not ecdsa.ecdsa.point_is_valid(self.curve.generator, x, y):
+                raise JWKError(f"Point: {x}, {y} is not a valid point")
+
+            point = ecdsa.ellipticcurve.Point(self.curve.curve, x, y, self.curve.order)
+            return ecdsa.keys.VerifyingKey.from_public_point(point, self.curve)
+
+    def sign(self, msg):
+        return self.prepared_key.sign(
+            msg, hashfunc=self.hash_alg, sigencode=ecdsa.util.sigencode_string, allow_truncate=False
+        )
+
+    def verify(self, msg, sig):
+        try:
+            return self.prepared_key.verify(
+                sig, msg, hashfunc=self.hash_alg, sigdecode=ecdsa.util.sigdecode_string, allow_truncate=False
+            )
+        except Exception:
+            return False
+
+    def is_public(self):
+        return isinstance(self.prepared_key, ecdsa.VerifyingKey)
+
+    def public_key(self):
+        if self.is_public():
+            return self
+        return self.__class__(self.prepared_key.get_verifying_key(), self._algorithm)
+
+    def to_pem(self):
+        return self.prepared_key.to_pem()
+
+    def to_dict(self):
+        if not self.is_public():
+            public_key = self.prepared_key.get_verifying_key()
+        else:
+            public_key = self.prepared_key
+        crv = None
+        for key, value in self.CURVE_NAMES:
+            if key == self.prepared_key.curve:
+                crv = value
+        if not crv:
+            raise KeyError(f"Can't match {self.prepared_key.curve}")
+
+        # Calculate the key size in bytes. Section 6.2.1.2 and 6.2.1.3 of
+        # RFC7518 prescribes that the 'x', 'y' and 'd' parameters of the curve
+        # points must be encoded as octet-strings of this length.
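
A comparable sketch against this pure-ecdsa backend, assuming the ecdsa package is installed:

import ecdsa
from jose.backends.ecdsa_backend import ECDSAECKey
from jose.constants import ALGORITHMS

key = ECDSAECKey(ecdsa.SigningKey.generate(curve=ecdsa.NIST256p), ALGORITHMS.ES256)
jwk_dict = key.to_dict()
# NIST256p.baselen == 32, so each raw signature component is 32 bytes
assert jwk_dict["crv"] == "P-256"
assert len(key.sign(b"msg")) == 64
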
+ key_size = self.prepared_key.curve.baselen + + data = { + "alg": self._algorithm, + "kty": "EC", + "crv": crv, + "x": long_to_base64(public_key.pubkey.point.x(), size=key_size).decode("ASCII"), + "y": long_to_base64(public_key.pubkey.point.y(), size=key_size).decode("ASCII"), + } + + if not self.is_public(): + data["d"] = long_to_base64(self.prepared_key.privkey.secret_multiplier, size=key_size).decode("ASCII") + + return data diff --git a/venv/lib/python3.10/site-packages/jose/backends/native.py b/venv/lib/python3.10/site-packages/jose/backends/native.py new file mode 100644 index 0000000..eb3a6ae --- /dev/null +++ b/venv/lib/python3.10/site-packages/jose/backends/native.py @@ -0,0 +1,76 @@ +import hashlib +import hmac +import os + +from jose.backends.base import Key +from jose.constants import ALGORITHMS +from jose.exceptions import JWKError +from jose.utils import base64url_decode, base64url_encode + + +def get_random_bytes(num_bytes): + return bytes(os.urandom(num_bytes)) + + +class HMACKey(Key): + """ + Performs signing and verification operations using HMAC + and the specified hash function. + """ + + HASHES = {ALGORITHMS.HS256: hashlib.sha256, ALGORITHMS.HS384: hashlib.sha384, ALGORITHMS.HS512: hashlib.sha512} + + def __init__(self, key, algorithm): + if algorithm not in ALGORITHMS.HMAC: + raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) + self._algorithm = algorithm + self._hash_alg = self.HASHES.get(algorithm) + + if isinstance(key, dict): + self.prepared_key = self._process_jwk(key) + return + + if not isinstance(key, str) and not isinstance(key, bytes): + raise JWKError("Expecting a string- or bytes-formatted key.") + + if isinstance(key, str): + key = key.encode("utf-8") + + invalid_strings = [ + b"-----BEGIN PUBLIC KEY-----", + b"-----BEGIN RSA PUBLIC KEY-----", + b"-----BEGIN CERTIFICATE-----", + b"ssh-rsa", + ] + + if any(string_value in key for string_value in invalid_strings): + raise JWKError( + "The specified key is an asymmetric key or x509 certificate and" + " should not be used as an HMAC secret." + ) + + self.prepared_key = key + + def _process_jwk(self, jwk_dict): + if not jwk_dict.get("kty") == "oct": + raise JWKError("Incorrect key type. 
Expected: 'oct', Received: %s" % jwk_dict.get("kty")) + + k = jwk_dict.get("k") + k = k.encode("utf-8") + k = bytes(k) + k = base64url_decode(k) + + return k + + def sign(self, msg): + return hmac.new(self.prepared_key, msg, self._hash_alg).digest() + + def verify(self, msg, sig): + return hmac.compare_digest(sig, self.sign(msg)) + + def to_dict(self): + return { + "alg": self._algorithm, + "kty": "oct", + "k": base64url_encode(self.prepared_key).decode("ASCII"), + } diff --git a/venv/lib/python3.10/site-packages/jose/backends/rsa_backend.py b/venv/lib/python3.10/site-packages/jose/backends/rsa_backend.py new file mode 100644 index 0000000..4e8ccf1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/jose/backends/rsa_backend.py @@ -0,0 +1,284 @@ +import binascii +import warnings + +import rsa as pyrsa +import rsa.pem as pyrsa_pem +from pyasn1.error import PyAsn1Error +from rsa import DecryptionError + +from jose.backends._asn1 import ( + rsa_private_key_pkcs1_to_pkcs8, + rsa_private_key_pkcs8_to_pkcs1, + rsa_public_key_pkcs1_to_pkcs8, +) +from jose.backends.base import Key +from jose.constants import ALGORITHMS +from jose.exceptions import JWEError, JWKError +from jose.utils import base64_to_long, long_to_base64 + +ALGORITHMS.SUPPORTED.remove(ALGORITHMS.RSA_OAEP) # RSA OAEP not supported + +LEGACY_INVALID_PKCS8_RSA_HEADER = binascii.unhexlify( + "30" # sequence + "8204BD" # DER-encoded sequence contents length of 1213 bytes -- INCORRECT STATIC LENGTH + "020100" # integer: 0 -- Version + "30" # sequence + "0D" # DER-encoded sequence contents length of 13 bytes -- PrivateKeyAlgorithmIdentifier + "06092A864886F70D010101" # OID -- rsaEncryption + "0500" # NULL -- parameters +) +ASN1_SEQUENCE_ID = binascii.unhexlify("30") +RSA_ENCRYPTION_ASN1_OID = "1.2.840.113549.1.1.1" + +# Functions gcd and rsa_recover_prime_factors were copied from cryptography 1.9 +# to enable pure python rsa module to be in compliance with section 6.3.1 of RFC7518 +# which requires only private exponent (d) for private key. + + +def _gcd(a, b): + """Calculate the Greatest Common Divisor of a and b. + + Unless b==0, the result will have the same sign as b (so that when + b is divided by it, the result comes out positive). + """ + while b: + a, b = b, (a % b) + return a + + +# Controls the number of iterations rsa_recover_prime_factors will perform +# to obtain the prime factors. Each iteration increments by 2 so the actual +# maximum attempts is half this number. +_MAX_RECOVERY_ATTEMPTS = 1000 + + +def _rsa_recover_prime_factors(n, e, d): + """ + Compute factors p and q from the private exponent d. We assume that n has + no more than two factors. This function is adapted from code in PyCrypto. + """ + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t = t // 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. 
Rabin, 1979 + spotted = False + a = 2 + while not spotted and a < _MAX_RECOVERY_ATTEMPTS: + k = t + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = _gcd(cand + 1, n) + spotted = True + break + k *= 2 + # This value was not any good... let's try another! + a += 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! + q, r = divmod(n, p) + assert r == 0 + p, q = sorted((p, q), reverse=True) + return (p, q) + + +def pem_to_spki(pem, fmt="PKCS8"): + key = RSAKey(pem, ALGORITHMS.RS256) + return key.to_pem(fmt) + + +def _legacy_private_key_pkcs8_to_pkcs1(pkcs8_key): + """Legacy RSA private key PKCS8-to-PKCS1 conversion. + + .. warning:: + + This is incorrect parsing and only works because the legacy PKCS1-to-PKCS8 + encoding was also incorrect. + """ + # Only allow this processing if the prefix matches + # AND the following byte indicates an ASN1 sequence, + # as we would expect with the legacy encoding. + if not pkcs8_key.startswith(LEGACY_INVALID_PKCS8_RSA_HEADER + ASN1_SEQUENCE_ID): + raise ValueError("Invalid private key encoding") + + return pkcs8_key[len(LEGACY_INVALID_PKCS8_RSA_HEADER) :] + + +class RSAKey(Key): + SHA256 = "SHA-256" + SHA384 = "SHA-384" + SHA512 = "SHA-512" + + def __init__(self, key, algorithm): + if algorithm not in ALGORITHMS.RSA: + raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) + + if algorithm in ALGORITHMS.RSA_KW and algorithm != ALGORITHMS.RSA1_5: + raise JWKError("alg: %s is not supported by the RSA backend" % algorithm) + + self.hash_alg = { + ALGORITHMS.RS256: self.SHA256, + ALGORITHMS.RS384: self.SHA384, + ALGORITHMS.RS512: self.SHA512, + }.get(algorithm) + self._algorithm = algorithm + + if isinstance(key, dict): + self._prepared_key = self._process_jwk(key) + return + + if isinstance(key, (pyrsa.PublicKey, pyrsa.PrivateKey)): + self._prepared_key = key + return + + if isinstance(key, str): + key = key.encode("utf-8") + + if isinstance(key, bytes): + try: + self._prepared_key = pyrsa.PublicKey.load_pkcs1(key) + except ValueError: + try: + self._prepared_key = pyrsa.PublicKey.load_pkcs1_openssl_pem(key) + except ValueError: + try: + self._prepared_key = pyrsa.PrivateKey.load_pkcs1(key) + except ValueError: + try: + der = pyrsa_pem.load_pem(key, b"PRIVATE KEY") + try: + pkcs1_key = rsa_private_key_pkcs8_to_pkcs1(der) + except PyAsn1Error: + # If the key was encoded using the old, invalid, + # encoding then pyasn1 will throw an error attempting + # to parse the key. + pkcs1_key = _legacy_private_key_pkcs8_to_pkcs1(der) + self._prepared_key = pyrsa.PrivateKey.load_pkcs1(pkcs1_key, format="DER") + except ValueError as e: + raise JWKError(e) + return + raise JWKError("Unable to parse an RSA_JWK from key: %s" % key) + + def _process_jwk(self, jwk_dict): + if not jwk_dict.get("kty") == "RSA": + raise JWKError("Incorrect key type. Expected: 'RSA', Received: %s" % jwk_dict.get("kty")) + + e = base64_to_long(jwk_dict.get("e")) + n = base64_to_long(jwk_dict.get("n")) + + if "d" not in jwk_dict: + return pyrsa.PublicKey(e=e, n=n) + else: + d = base64_to_long(jwk_dict.get("d")) + extra_params = ["p", "q", "dp", "dq", "qi"] + + if any(k in jwk_dict for k in extra_params): + # Precomputed private key parameters are available. 
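
For orientation, a small sketch of feeding this backend a PKCS#1 PEM generated with the rsa package (illustrative only; 2048-bit key, and key generation is slow):

import rsa as pyrsa
from jose.backends.rsa_backend import RSAKey
from jose.constants import ALGORITHMS

_, priv = pyrsa.newkeys(2048)
key = RSAKey(priv.save_pkcs1().decode("utf-8"), ALGORITHMS.RS256)
signature = key.sign(b"msg")
assert key.public_key().verify(b"msg", signature)
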
+ if not all(k in jwk_dict for k in extra_params): + # These values must be present when 'p' is according to + # Section 6.3.2 of RFC7518, so if they are not we raise + # an error. + raise JWKError("Precomputed private key parameters are incomplete.") + + p = base64_to_long(jwk_dict["p"]) + q = base64_to_long(jwk_dict["q"]) + return pyrsa.PrivateKey(e=e, n=n, d=d, p=p, q=q) + else: + p, q = _rsa_recover_prime_factors(n, e, d) + return pyrsa.PrivateKey(n=n, e=e, d=d, p=p, q=q) + + def sign(self, msg): + return pyrsa.sign(msg, self._prepared_key, self.hash_alg) + + def verify(self, msg, sig): + if not self.is_public(): + warnings.warn("Attempting to verify a message with a private key. " "This is not recommended.") + try: + pyrsa.verify(msg, sig, self._prepared_key) + return True + except pyrsa.pkcs1.VerificationError: + return False + + def is_public(self): + return isinstance(self._prepared_key, pyrsa.PublicKey) + + def public_key(self): + if isinstance(self._prepared_key, pyrsa.PublicKey): + return self + return self.__class__(pyrsa.PublicKey(n=self._prepared_key.n, e=self._prepared_key.e), self._algorithm) + + def to_pem(self, pem_format="PKCS8"): + + if isinstance(self._prepared_key, pyrsa.PrivateKey): + der = self._prepared_key.save_pkcs1(format="DER") + if pem_format == "PKCS8": + pkcs8_der = rsa_private_key_pkcs1_to_pkcs8(der) + pem = pyrsa_pem.save_pem(pkcs8_der, pem_marker="PRIVATE KEY") + elif pem_format == "PKCS1": + pem = pyrsa_pem.save_pem(der, pem_marker="RSA PRIVATE KEY") + else: + raise ValueError(f"Invalid pem format specified: {pem_format!r}") + else: + if pem_format == "PKCS8": + pkcs1_der = self._prepared_key.save_pkcs1(format="DER") + pkcs8_der = rsa_public_key_pkcs1_to_pkcs8(pkcs1_der) + pem = pyrsa_pem.save_pem(pkcs8_der, pem_marker="PUBLIC KEY") + elif pem_format == "PKCS1": + der = self._prepared_key.save_pkcs1(format="DER") + pem = pyrsa_pem.save_pem(der, pem_marker="RSA PUBLIC KEY") + else: + raise ValueError(f"Invalid pem format specified: {pem_format!r}") + return pem + + def to_dict(self): + if not self.is_public(): + public_key = self.public_key()._prepared_key + else: + public_key = self._prepared_key + + data = { + "alg": self._algorithm, + "kty": "RSA", + "n": long_to_base64(public_key.n).decode("ASCII"), + "e": long_to_base64(public_key.e).decode("ASCII"), + } + + if not self.is_public(): + data.update( + { + "d": long_to_base64(self._prepared_key.d).decode("ASCII"), + "p": long_to_base64(self._prepared_key.p).decode("ASCII"), + "q": long_to_base64(self._prepared_key.q).decode("ASCII"), + "dp": long_to_base64(self._prepared_key.exp1).decode("ASCII"), + "dq": long_to_base64(self._prepared_key.exp2).decode("ASCII"), + "qi": long_to_base64(self._prepared_key.coef).decode("ASCII"), + } + ) + + return data + + def wrap_key(self, key_data): + if not self.is_public(): + warnings.warn("Attempting to encrypt a message with a private key." 
" This is not recommended.") + wrapped_key = pyrsa.encrypt(key_data, self._prepared_key) + return wrapped_key + + def unwrap_key(self, wrapped_key): + try: + unwrapped_key = pyrsa.decrypt(wrapped_key, self._prepared_key) + except DecryptionError as e: + raise JWEError(e) + return unwrapped_key diff --git a/venv/lib/python3.10/site-packages/jose/constants.py b/venv/lib/python3.10/site-packages/jose/constants.py new file mode 100644 index 0000000..ab4d74d --- /dev/null +++ b/venv/lib/python3.10/site-packages/jose/constants.py @@ -0,0 +1,98 @@ +import hashlib + + +class Algorithms: + # DS Algorithms + NONE = "none" + HS256 = "HS256" + HS384 = "HS384" + HS512 = "HS512" + RS256 = "RS256" + RS384 = "RS384" + RS512 = "RS512" + ES256 = "ES256" + ES384 = "ES384" + ES512 = "ES512" + + # Content Encryption Algorithms + A128CBC_HS256 = "A128CBC-HS256" + A192CBC_HS384 = "A192CBC-HS384" + A256CBC_HS512 = "A256CBC-HS512" + A128GCM = "A128GCM" + A192GCM = "A192GCM" + A256GCM = "A256GCM" + + # Pseudo algorithm for encryption + A128CBC = "A128CBC" + A192CBC = "A192CBC" + A256CBC = "A256CBC" + + # CEK Encryption Algorithms + DIR = "dir" + RSA1_5 = "RSA1_5" + RSA_OAEP = "RSA-OAEP" + RSA_OAEP_256 = "RSA-OAEP-256" + A128KW = "A128KW" + A192KW = "A192KW" + A256KW = "A256KW" + ECDH_ES = "ECDH-ES" + ECDH_ES_A128KW = "ECDH-ES+A128KW" + ECDH_ES_A192KW = "ECDH-ES+A192KW" + ECDH_ES_A256KW = "ECDH-ES+A256KW" + A128GCMKW = "A128GCMKW" + A192GCMKW = "A192GCMKW" + A256GCMKW = "A256GCMKW" + PBES2_HS256_A128KW = "PBES2-HS256+A128KW" + PBES2_HS384_A192KW = "PBES2-HS384+A192KW" + PBES2_HS512_A256KW = "PBES2-HS512+A256KW" + + # Compression Algorithms + DEF = "DEF" + + HMAC = {HS256, HS384, HS512} + RSA_DS = {RS256, RS384, RS512} + RSA_KW = {RSA1_5, RSA_OAEP, RSA_OAEP_256} + RSA = RSA_DS.union(RSA_KW) + EC_DS = {ES256, ES384, ES512} + EC_KW = {ECDH_ES, ECDH_ES_A128KW, ECDH_ES_A192KW, ECDH_ES_A256KW} + EC = EC_DS.union(EC_KW) + AES_PSEUDO = {A128CBC, A192CBC, A256CBC, A128GCM, A192GCM, A256GCM} + AES_JWE_ENC = {A128CBC_HS256, A192CBC_HS384, A256CBC_HS512, A128GCM, A192GCM, A256GCM} + AES_ENC = AES_JWE_ENC.union(AES_PSEUDO) + AES_KW = {A128KW, A192KW, A256KW} + AEC_GCM_KW = {A128GCMKW, A192GCMKW, A256GCMKW} + AES = AES_ENC.union(AES_KW) + PBES2_KW = {PBES2_HS256_A128KW, PBES2_HS384_A192KW, PBES2_HS512_A256KW} + + HMAC_AUTH_TAG = {A128CBC_HS256, A192CBC_HS384, A256CBC_HS512} + GCM = {A128GCM, A192GCM, A256GCM} + + SUPPORTED = HMAC.union(RSA_DS).union(EC_DS).union([DIR]).union(AES_JWE_ENC).union(RSA_KW).union(AES_KW) + + ALL = SUPPORTED.union([NONE]).union(AEC_GCM_KW).union(EC_KW).union(PBES2_KW) + + HASHES = { + HS256: hashlib.sha256, + HS384: hashlib.sha384, + HS512: hashlib.sha512, + RS256: hashlib.sha256, + RS384: hashlib.sha384, + RS512: hashlib.sha512, + ES256: hashlib.sha256, + ES384: hashlib.sha384, + ES512: hashlib.sha512, + } + + KEYS = {} + + +ALGORITHMS = Algorithms() + + +class Zips: + DEF = "DEF" + NONE = None + SUPPORTED = {DEF, NONE} + + +ZIPS = Zips() diff --git a/venv/lib/python3.10/site-packages/jose/exceptions.py b/venv/lib/python3.10/site-packages/jose/exceptions.py new file mode 100644 index 0000000..e8edc3b --- /dev/null +++ b/venv/lib/python3.10/site-packages/jose/exceptions.py @@ -0,0 +1,59 @@ +class JOSEError(Exception): + pass + + +class JWSError(JOSEError): + pass + + +class JWSSignatureError(JWSError): + pass + + +class JWSAlgorithmError(JWSError): + pass + + +class JWTError(JOSEError): + pass + + +class JWTClaimsError(JWTError): + pass + + +class ExpiredSignatureError(JWTError): + pass + + +class 
JWKError(JOSEError):
+    pass
+
+
+class JWEError(JOSEError):
+    """Base error for all JWE errors"""
+
+    pass
+
+
+class JWEParseError(JWEError):
+    """Could not parse the JWE string provided"""
+
+    pass
+
+
+class JWEInvalidAuth(JWEError):
+    """
+    The authentication tag did not match the protected sections of the
+    JWE string provided
+    """
+
+    pass
+
+
+class JWEAlgorithmUnsupportedError(JWEError):
+    """
+    The JWE algorithm is not supported by the backend
+    """
+
+    pass
diff --git a/venv/lib/python3.10/site-packages/jose/jwe.py b/venv/lib/python3.10/site-packages/jose/jwe.py
new file mode 100644
index 0000000..2c387ff
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/jose/jwe.py
@@ -0,0 +1,607 @@
+import binascii
+import json
+import zlib
+from collections.abc import Mapping
+from struct import pack
+
+from . import jwk
+from .backends import get_random_bytes
+from .constants import ALGORITHMS, ZIPS
+from .exceptions import JWEError, JWEParseError
+from .utils import base64url_decode, base64url_encode, ensure_binary
+
+
+def encrypt(plaintext, key, encryption=ALGORITHMS.A256GCM, algorithm=ALGORITHMS.DIR, zip=None, cty=None, kid=None):
+    """Encrypts plaintext and returns a JWE compact serialization string.
+
+    Args:
+        plaintext (bytes): A bytes object to encrypt
+        key (str or dict): The key(s) to use for encrypting the content. Can be
+            individual JWK or JWK set.
+        encryption (str, optional): The content encryption algorithm used to
+            perform authenticated encryption on the plaintext to produce the
+            ciphertext and the Authentication Tag. Defaults to A256GCM.
+        algorithm (str, optional): The cryptographic algorithm used
+            to encrypt or determine the value of the CEK. Defaults to dir.
+        zip (str, optional): The compression algorithm applied to the
+            plaintext before encryption. Defaults to None.
+        cty (str, optional): The media type for the secured content.
+            See http://www.iana.org/assignments/media-types/media-types.xhtml
+        kid (str, optional): Key ID for the provided key
+
+    Returns:
+        bytes: The string representation of the header, encrypted key,
+            initialization vector, ciphertext, and authentication tag.
+
+    Raises:
+        JWEError: If there is an error encrypting the token.
+
+    Examples:
+        >>> from jose import jwe
+        >>> jwe.encrypt('Hello, World!', 'asecret128bitkey', algorithm='dir', encryption='A128GCM')
+        'eyJhbGciOiJkaXIiLCJlbmMiOiJBMTI4R0NNIn0..McILMB3dYsNJSuhcDzQshA.OfX9H_mcUpHDeRM4IA.CcnTWqaqxNsjT4eCaUABSg'
+
+    """
+    plaintext = ensure_binary(plaintext)  # Make sure it's bytes
+    if algorithm not in ALGORITHMS.SUPPORTED:
+        raise JWEError("Algorithm %s not supported." % algorithm)
+    if encryption not in ALGORITHMS.SUPPORTED:
+        raise JWEError("Algorithm %s not supported." % encryption)
+    key = jwk.construct(key, algorithm)
+    encoded_header = _encoded_header(algorithm, encryption, zip, cty, kid)
+
+    plaintext = _compress(zip, plaintext)
+    enc_cek, iv, cipher_text, auth_tag = _encrypt_and_auth(key, algorithm, encryption, zip, plaintext, encoded_header)
+
+    jwe_string = _jwe_compact_serialize(encoded_header, enc_cek, iv, cipher_text, auth_tag)
+    return jwe_string
+
+
+def decrypt(jwe_str, key):
+    """Decrypts a JWE compact serialized string and returns the plaintext.
+
+    Args:
+        jwe_str (str): A JWE to be decrypted.
+        key (str or dict): A key to attempt to decrypt the payload with. Can be
+            individual JWK or JWK set.
+
+    Returns:
+        bytes: The plaintext bytes, assuming the authentication tag is valid.
+
+    Raises:
+        JWEError: If there is an exception verifying the token.
+ + Examples: + >>> from jose import jwe + >>> jwe.decrypt(jwe_string, 'asecret128bitkey') + 'Hello, World!' + """ + header, encoded_header, encrypted_key, iv, cipher_text, auth_tag = _jwe_compact_deserialize(jwe_str) + + # Verify that the implementation understands and can process all + # fields that it is required to support, whether required by this + # specification, by the algorithms being used, or by the "crit" + # Header Parameter value, and that the values of those parameters + # are also understood and supported. + + try: + # Determine the Key Management Mode employed by the algorithm + # specified by the "alg" (algorithm) Header Parameter. + alg = header["alg"] + enc = header["enc"] + if alg not in ALGORITHMS.SUPPORTED: + raise JWEError("Algorithm %s not supported." % alg) + if enc not in ALGORITHMS.SUPPORTED: + raise JWEError("Algorithm %s not supported." % enc) + + except KeyError: + raise JWEParseError("alg and enc headers are required!") + + # Verify that the JWE uses a key known to the recipient. + key = jwk.construct(key, alg) + + # When Direct Key Agreement or Key Agreement with Key Wrapping are + # employed, use the key agreement algorithm to compute the value + # of the agreed upon key. When Direct Key Agreement is employed, + # let the CEK be the agreed upon key. When Key Agreement with Key + # Wrapping is employed, the agreed upon key will be used to + # decrypt the JWE Encrypted Key. + # + # When Key Wrapping, Key Encryption, or Key Agreement with Key + # Wrapping are employed, decrypt the JWE Encrypted Key to produce + # the CEK. The CEK MUST have a length equal to that required for + # the content encryption algorithm. Note that when there are + # multiple recipients, each recipient will only be able to decrypt + # JWE Encrypted Key values that were encrypted to a key in that + # recipient's possession. It is therefore normal to only be able + # to decrypt one of the per-recipient JWE Encrypted Key values to + # obtain the CEK value. Also, see Section 11.5 for security + # considerations on mitigating timing attacks. + if alg == ALGORITHMS.DIR: + # When Direct Key Agreement or Direct Encryption are employed, + # verify that the JWE Encrypted Key value is an empty octet + # sequence. + + # Record whether the CEK could be successfully determined for this + # recipient or not. + cek_valid = encrypted_key == b"" + + # When Direct Encryption is employed, let the CEK be the shared + # symmetric key. + cek_bytes = _get_key_bytes_from_key(key) + else: + try: + cek_bytes = key.unwrap_key(encrypted_key) + + # Record whether the CEK could be successfully determined for this + # recipient or not. + cek_valid = True + except NotImplementedError: + raise JWEError(f"alg {alg} is not implemented") + except Exception: + # Record whether the CEK could be successfully determined for this + # recipient or not. + cek_valid = False + + # To mitigate the attacks described in RFC 3218 [RFC3218], the + # recipient MUST NOT distinguish between format, padding, and length + # errors of encrypted keys. It is strongly recommended, in the event + # of receiving an improperly formatted key, that the recipient + # substitute a randomly generated CEK and proceed to the next step, to + # mitigate timing attacks. + cek_bytes = _get_random_cek_bytes_for_enc(enc) + + # Compute the Encoded Protected Header value BASE64URL(UTF8(JWE + # Protected Header)). 
If the JWE Protected Header is not present + # (which can only happen when using the JWE JSON Serialization and + # no "protected" member is present), let this value be the empty + # string. + protected_header = encoded_header + + # Let the Additional Authenticated Data encryption parameter be + # ASCII(Encoded Protected Header). However, if a JWE AAD value is + # present (which can only be the case when using the JWE JSON + # Serialization), instead let the Additional Authenticated Data + # encryption parameter be ASCII(Encoded Protected Header || '.' || + # BASE64URL(JWE AAD)). + aad = protected_header + + # Decrypt the JWE Ciphertext using the CEK, the JWE Initialization + # Vector, the Additional Authenticated Data value, and the JWE + # Authentication Tag (which is the Authentication Tag input to the + # calculation) using the specified content encryption algorithm, + # returning the decrypted plaintext and validating the JWE + # Authentication Tag in the manner specified for the algorithm, + # rejecting the input without emitting any decrypted output if the + # JWE Authentication Tag is incorrect. + try: + plain_text = _decrypt_and_auth(cek_bytes, enc, cipher_text, iv, aad, auth_tag) + except NotImplementedError: + raise JWEError(f"enc {enc} is not implemented") + except Exception as e: + raise JWEError(e) + + # If a "zip" parameter was included, uncompress the decrypted + # plaintext using the specified compression algorithm. + if plain_text is not None: + plain_text = _decompress(header.get("zip"), plain_text) + + return plain_text if cek_valid else None + + +def get_unverified_header(jwe_str): + """Returns the decoded headers without verification of any kind. + + Args: + jwe_str (str): A compact serialized JWE to decode the headers from. + + Returns: + dict: The dict representation of the JWE headers. + + Raises: + JWEError: If there is an exception decoding the JWE. 
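
Putting the three public entry points together, a round trip with the 128-bit example key from the encrypt docstring looks like this (a sketch, not a test vector; the token value varies because the IV is random):

from jose import jwe

token = jwe.encrypt("Hello, World!", "asecret128bitkey", algorithm="dir", encryption="A128GCM")
assert jwe.get_unverified_header(token) == {"alg": "dir", "enc": "A128GCM"}
assert jwe.decrypt(token, "asecret128bitkey") == b"Hello, World!"
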
+ """ + header = _jwe_compact_deserialize(jwe_str)[0] + return header + + +def _decrypt_and_auth(cek_bytes, enc, cipher_text, iv, aad, auth_tag): + """ + Decrypt and verify the data + + Args: + cek_bytes (bytes): cek to derive encryption and possible auth key to + verify the auth tag + cipher_text (bytes): Encrypted data + iv (bytes): Initialization vector (iv) used to encrypt data + aad (bytes): Additional Authenticated Data used to verify the data + auth_tag (bytes): Authentication ntag to verify the data + + Returns: + (bytes): Decrypted data + """ + # Decrypt the JWE Ciphertext using the CEK, the JWE Initialization + # Vector, the Additional Authenticated Data value, and the JWE + # Authentication Tag (which is the Authentication Tag input to the + # calculation) using the specified content encryption algorithm, + # returning the decrypted plaintext + # and validating the JWE + # Authentication Tag in the manner specified for the algorithm, + if enc in ALGORITHMS.HMAC_AUTH_TAG: + encryption_key, mac_key, key_len = _get_encryption_key_mac_key_and_key_length_from_cek(cek_bytes, enc) + auth_tag_check = _auth_tag(cipher_text, iv, aad, mac_key, key_len) + elif enc in ALGORITHMS.GCM: + encryption_key = jwk.construct(cek_bytes, enc) + auth_tag_check = auth_tag # GCM check auth on decrypt + else: + raise NotImplementedError(f"enc {enc} is not implemented!") + + plaintext = encryption_key.decrypt(cipher_text, iv, aad, auth_tag) + if auth_tag != auth_tag_check: + raise JWEError("Invalid JWE Auth Tag") + + return plaintext + + +def _get_encryption_key_mac_key_and_key_length_from_cek(cek_bytes, enc): + derived_key_len = len(cek_bytes) // 2 + mac_key_bytes = cek_bytes[0:derived_key_len] + mac_key = _get_hmac_key(enc, mac_key_bytes) + encryption_key_bytes = cek_bytes[-derived_key_len:] + encryption_alg, _ = enc.split("-") + encryption_key = jwk.construct(encryption_key_bytes, encryption_alg) + return encryption_key, mac_key, derived_key_len + + +def _jwe_compact_deserialize(jwe_bytes): + """ + Deserialize and verify the header and segments are appropriate. + + Args: + jwe_bytes (bytes): The compact serialized JWE + Returns: + (dict, bytes, bytes, bytes, bytes, bytes) + """ + + # Base64url decode the encoded representations of the JWE + # Protected Header, the JWE Encrypted Key, the JWE Initialization + # Vector, the JWE Ciphertext, the JWE Authentication Tag, and the + # JWE AAD, following the restriction that no line breaks, + # whitespace, or other additional characters have been used. + jwe_bytes = ensure_binary(jwe_bytes) + try: + header_segment, encrypted_key_segment, iv_segment, cipher_text_segment, auth_tag_segment = jwe_bytes.split( + b".", 4 + ) + header_data = base64url_decode(header_segment) + except ValueError: + raise JWEParseError("Not enough segments") + except (TypeError, binascii.Error): + raise JWEParseError("Invalid header") + + # Verify that the octet sequence resulting from decoding the + # encoded JWE Protected Header is a UTF-8-encoded representation + # of a completely valid JSON object conforming to RFC 7159 + # [RFC7159]; let the JWE Protected Header be this JSON object. + # + # If using the JWE Compact Serialization, let the JOSE Header be + # the JWE Protected Header. Otherwise, when using the JWE JSON + # Serialization, let the JOSE Header be the union of the members + # of the JWE Protected Header, the JWE Shared Unprotected Header + # and the corresponding JWE Per-Recipient Unprotected Header, all + # of which must be completely valid JSON objects. 
During this + # step, verify that the resulting JOSE Header does not contain + # duplicate Header Parameter names. When using the JWE JSON + # Serialization, this restriction includes that the same Header + # Parameter name also MUST NOT occur in distinct JSON object + # values that together comprise the JOSE Header. + + try: + header = json.loads(header_data) + except ValueError as e: + raise JWEParseError(f"Invalid header string: {e}") + + if not isinstance(header, Mapping): + raise JWEParseError("Invalid header string: must be a json object") + + try: + encrypted_key = base64url_decode(encrypted_key_segment) + except (TypeError, binascii.Error): + raise JWEParseError("Invalid encrypted key") + + try: + iv = base64url_decode(iv_segment) + except (TypeError, binascii.Error): + raise JWEParseError("Invalid IV") + + try: + ciphertext = base64url_decode(cipher_text_segment) + except (TypeError, binascii.Error): + raise JWEParseError("Invalid cyphertext") + + try: + auth_tag = base64url_decode(auth_tag_segment) + except (TypeError, binascii.Error): + raise JWEParseError("Invalid auth tag") + + return header, header_segment, encrypted_key, iv, ciphertext, auth_tag + + +def _encoded_header(alg, enc, zip, cty, kid): + """ + Generate an appropriate JOSE header based on the values provided + Args: + alg (str): Key wrap/negotiation algorithm + enc (str): Encryption algorithm + zip (str): Compression method + cty (str): Content type of the encrypted data + kid (str): ID for the key used for the operation + + Returns: + bytes: JSON object of header based on input + """ + header = {"alg": alg, "enc": enc} + if zip: + header["zip"] = zip + if cty: + header["cty"] = cty + if kid: + header["kid"] = kid + json_header = json.dumps( + header, + separators=(",", ":"), + sort_keys=True, + ).encode("utf-8") + return base64url_encode(json_header) + + +def _big_endian(int_val): + return pack("!Q", int_val) + + +def _encrypt_and_auth(key, alg, enc, zip, plaintext, aad): + """ + Generate a content encryption key (cek) and initialization + vector (iv) based on enc and alg, compress the plaintext based on zip, + encrypt the compressed plaintext using the cek and iv based on enc + + Args: + key (Key): The key provided for encryption + alg (str): The algorithm use for key wrap/negotiation + enc (str): The encryption algorithm with which to encrypt the plaintext + zip (str): The compression algorithm with which to compress the plaintext + plaintext (bytes): The data to encrypt + aad (str): Additional authentication data utilized for generating an + auth tag + + Returns: + (bytes, bytes, bytes, bytes): A tuple of the following data + (key wrapped cek, iv, cipher text, auth tag) + """ + try: + cek_bytes, kw_cek = _get_cek(enc, alg, key) + except NotImplementedError: + raise JWEError(f"alg {alg} is not implemented") + + if enc in ALGORITHMS.HMAC_AUTH_TAG: + encryption_key, mac_key, key_len = _get_encryption_key_mac_key_and_key_length_from_cek(cek_bytes, enc) + iv, ciphertext, tag = encryption_key.encrypt(plaintext, aad) + auth_tag = _auth_tag(ciphertext, iv, aad, mac_key, key_len) + elif enc in ALGORITHMS.GCM: + encryption_key = jwk.construct(cek_bytes, enc) + iv, ciphertext, auth_tag = encryption_key.encrypt(plaintext, aad) + else: + raise NotImplementedError(f"enc {enc} is not implemented!") + + return kw_cek, iv, ciphertext, auth_tag + + +def _get_hmac_key(enc, mac_key_bytes): + """ + Get an HMACKey for the provided encryption algorithm and key bytes + + Args: + enc (str): Encryption algorithm + mac_key_bytes (bytes): 
bytes for the HMAC key
+
+    Returns:
+        (HMACKey): The key to perform HMAC actions
+    """
+    _, hash_alg = enc.split("-")
+    mac_key = jwk.construct(mac_key_bytes, hash_alg)
+    return mac_key
+
+
+def _compress(zip, plaintext):
+    """
+    Compress the plaintext based on the algorithm supplied
+
+    Args:
+        zip (str): Compression Algorithm
+        plaintext (bytes): plaintext to compress
+
+    Returns:
+        (bytes): Compressed plaintext
+    """
+    if zip not in ZIPS.SUPPORTED:
+        raise NotImplementedError("ZIP {} is not supported!")
+    if zip is None:
+        compressed = plaintext
+    elif zip == ZIPS.DEF:
+        compressed = zlib.compress(plaintext)
+    else:
+        raise NotImplementedError("ZIP {} is not implemented!")
+    return compressed
+
+
+def _decompress(zip, compressed):
+    """
+    Decompress the plaintext based on the algorithm supplied
+
+    Args:
+        zip (str): Compression Algorithm
+        compressed (bytes): data to decompress
+
+    Returns:
+        (bytes): Decompressed plaintext
+    """
+    if zip not in ZIPS.SUPPORTED:
+        raise NotImplementedError("ZIP {} is not supported!")
+    if zip is None:
+        decompressed = compressed
+    elif zip == ZIPS.DEF:
+        decompressed = zlib.decompress(compressed)
+    else:
+        raise NotImplementedError("ZIP {} is not implemented!")
+    return decompressed
+
+
+def _get_cek(enc, alg, key):
+    """
+    Get the content encryption key
+
+    Args:
+        enc (str): Encryption algorithm
+        alg (str): key wrap/negotiation algorithm
+        key (Key): Key provided to encryption method
+
+    Return:
+        (bytes, bytes): Tuple of (cek bytes and wrapped cek)
+    """
+    if alg == ALGORITHMS.DIR:
+        cek, wrapped_cek = _get_direct_key_wrap_cek(key)
+    else:
+        cek, wrapped_cek = _get_key_wrap_cek(enc, key)
+
+    return cek, wrapped_cek
+
+
+def _get_direct_key_wrap_cek(key):
+    """
+    Get the cek and wrapped cek from the encryption key direct
+
+    Args:
+        key (Key): Key provided to encryption method
+
+    Return:
+        (Key, bytes): Tuple of (cek Key object and wrapped cek)
+    """
+    # Get the JWK data to determine how to derive the cek
+    jwk_data = key.to_dict()
+    if jwk_data["kty"] == "oct":
+        # Use the raw bytes of the 'oct' key directly as the cek
+        cek_bytes = _get_key_bytes_from_key(key)
+        wrapped_cek = b""
+    else:
+        raise NotImplementedError("JWK type {} not supported!".format(jwk_data["kty"]))
+    return cek_bytes, wrapped_cek
+
+
+def _get_key_bytes_from_key(key):
+    """
+    Get the raw key bytes from a Key object
+
+    Args:
+        key (Key): Key from which to extract the raw key bytes
+    Returns:
+        (bytes) key data
+    """
+    jwk_data = key.to_dict()
+    encoded_key = jwk_data["k"]
+    cek_bytes = base64url_decode(encoded_key)
+    return cek_bytes
+
+
+def _get_key_wrap_cek(enc, key):
+    """
+    Get the content encryption key for key wrap
+
+    Args:
+        enc (str): Encryption algorithm
+        key (Key): Key provided to encryption method
+
+    Returns:
+        (Key, bytes): Tuple of (cek Key object and wrapped cek)
+    """
+    cek_bytes = _get_random_cek_bytes_for_enc(enc)
+    wrapped_cek = key.wrap_key(cek_bytes)
+    return cek_bytes, wrapped_cek
+
+
+def _get_random_cek_bytes_for_enc(enc):
+    """
+    Get the random cek bytes based on the encryption algorithm
+
+    Args:
+        enc (str): Encryption algorithm
+
+    Returns:
+        (bytes) random bytes for cek key
+    """
+    if enc == ALGORITHMS.A128GCM:
+        num_bits = 128
+    elif enc == ALGORITHMS.A192GCM:
+        num_bits = 192
+    elif enc in (ALGORITHMS.A128CBC_HS256, ALGORITHMS.A256GCM):
+        num_bits = 256
+    elif enc == ALGORITHMS.A192CBC_HS384:
+        num_bits = 384
+    elif enc == ALGORITHMS.A256CBC_HS512:
+        num_bits = 512
+    else:
+        raise NotImplementedError(f"{enc} not supported")
+    cek_bytes = get_random_bytes(num_bits // 8)
+    return cek_bytes
+
+
+def _auth_tag(ciphertext, iv, aad, mac_key, tag_length):
+    """
+    Get an auth tag from the provided data
+
+    Args:
+        ciphertext (bytes): Encrypted value
+        iv (bytes): Initialization vector
+        aad (bytes): Additional Authenticated Data
+        mac_key (bytes): Key to use in generating the MAC
+        tag_length (int): How long the tag should be
+
+    Returns:
+        (bytes) Auth tag
+    """
+    al = _big_endian(len(aad) * 8)
+    auth_tag_input = aad + iv + ciphertext + al
+    signature = mac_key.sign(auth_tag_input)
+    auth_tag = signature[0:tag_length]
+    return auth_tag
+
+
+def _jwe_compact_serialize(encoded_header, encrypted_cek, iv, cipher_text, auth_tag):
+    """
+    Generate a compact serialized JWE
+
+    Args:
+        encoded_header (bytes): Base64 URL Encoded JWE header JSON
+        encrypted_cek (bytes): Encrypted content encryption key (cek)
+        iv (bytes): Initialization vector (IV)
+        cipher_text (bytes): Cipher text
+        auth_tag (bytes): JWE Auth Tag
+
+    Returns:
+        (str): JWE compact serialized string
+    """
+    cipher_text = ensure_binary(cipher_text)
+    encoded_encrypted_cek = base64url_encode(encrypted_cek)
+    encoded_iv = base64url_encode(iv)
+    encoded_cipher_text = base64url_encode(cipher_text)
+    encoded_auth_tag = base64url_encode(auth_tag)
+    return (
+        encoded_header
+        + b"."
+        + encoded_encrypted_cek
+        + b"."
+        + encoded_iv
+        + b"."
+        + encoded_cipher_text
+        + b"."
+        + encoded_auth_tag
+    )
diff --git a/venv/lib/python3.10/site-packages/jose/jwk.py b/venv/lib/python3.10/site-packages/jose/jwk.py
new file mode 100644
index 0000000..7afc054
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/jose/jwk.py
@@ -0,0 +1,79 @@
+from jose.backends.base import Key
+from jose.constants import ALGORITHMS
+from jose.exceptions import JWKError
+
+try:
+    from jose.backends import RSAKey  # noqa: F401
+except ImportError:
+    pass
+
+try:
+    from jose.backends import ECKey  # noqa: F401
+except ImportError:
+    pass
+
+try:
+    from jose.backends import AESKey  # noqa: F401
+except ImportError:
+    pass
+
+try:
+    from jose.backends import DIRKey  # noqa: F401
+except ImportError:
+    pass
+
+try:
+    from jose.backends import HMACKey  # noqa: F401
+except ImportError:
+    pass
+
+
+def get_key(algorithm):
+    if algorithm in ALGORITHMS.KEYS:
+        return ALGORITHMS.KEYS[algorithm]
+    elif algorithm in ALGORITHMS.HMAC:  # noqa: F811
+        return HMACKey
+    elif algorithm in ALGORITHMS.RSA:
+        from jose.backends import RSAKey  # noqa: F811
+
+        return RSAKey
+    elif algorithm in ALGORITHMS.EC:
+        from jose.backends import ECKey  # noqa: F811
+
+        return ECKey
+    elif algorithm in ALGORITHMS.AES:
+        from jose.backends import AESKey  # noqa: F811
+
+        return AESKey
+    elif algorithm == ALGORITHMS.DIR:
+        from jose.backends import DIRKey  # noqa: F811
+
+        return DIRKey
+    return None
+
+
+def register_key(algorithm, key_class):
+    if not issubclass(key_class, Key):
+        raise TypeError("Key class is not a subclass of jwk.Key")
+    ALGORITHMS.KEYS[algorithm] = key_class
+    ALGORITHMS.SUPPORTED.add(algorithm)
+    return True
+
+
+def construct(key_data, algorithm=None):
+    """
+    Construct a Key object for the given algorithm with the given
+    key_data.
+    """
+
+    # Allow for pulling the algorithm off of the passed in jwk.
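
As that comment notes, construct can read the algorithm straight from the JWK; a minimal sketch ("c2VjcmV0" is base64url for "secret"):

from jose import jwk

key = jwk.construct({"kty": "oct", "k": "c2VjcmV0", "alg": "HS256"})
assert key.verify(b"msg", key.sign(b"msg"))
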
+    if not algorithm and isinstance(key_data, dict):
+        algorithm = key_data.get("alg", None)
+
+    if not algorithm:
+        raise JWKError("Unable to find an algorithm for key: %s" % key_data)
+
+    key_class = get_key(algorithm)
+    if not key_class:
+        raise JWKError("Unable to find an algorithm for key: %s" % key_data)
+    return key_class(key_data, algorithm)
diff --git a/venv/lib/python3.10/site-packages/jose/jws.py b/venv/lib/python3.10/site-packages/jose/jws.py
new file mode 100644
index 0000000..bfaf6bd
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/jose/jws.py
@@ -0,0 +1,266 @@
+import binascii
+import json
+from collections.abc import Iterable, Mapping
+
+from jose import jwk
+from jose.backends.base import Key
+from jose.constants import ALGORITHMS
+from jose.exceptions import JWSError, JWSSignatureError
+from jose.utils import base64url_decode, base64url_encode
+
+
+def sign(payload, key, headers=None, algorithm=ALGORITHMS.HS256):
+    """Signs a claims set and returns a JWS string.
+
+    Args:
+        payload (str or dict): A string to sign
+        key (str or dict): The key to use for signing the claim set. Can be
+            individual JWK or JWK set.
+        headers (dict, optional): A set of headers that will be added to
+            the default headers. Any headers that are added as additional
+            headers will override the default headers.
+        algorithm (str, optional): The algorithm to use for signing the
+            claims. Defaults to HS256.
+
+    Returns:
+        str: The string representation of the header, claims, and signature.
+
+    Raises:
+        JWSError: If there is an error signing the token.
+
+    Examples:
+
+        >>> jws.sign({'a': 'b'}, 'secret', algorithm='HS256')
+        'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8'
+
+    """
+
+    if algorithm not in ALGORITHMS.SUPPORTED:
+        raise JWSError("Algorithm %s not supported." % algorithm)
+
+    encoded_header = _encode_header(algorithm, additional_headers=headers)
+    encoded_payload = _encode_payload(payload)
+    signed_output = _sign_header_and_claims(encoded_header, encoded_payload, algorithm, key)
+
+    return signed_output
+
+
+def verify(token, key, algorithms, verify=True):
+    """Verifies a JWS string's signature.
+
+    Args:
+        token (str): A signed JWS to be verified.
+        key (str or dict): A key to attempt to verify the payload with. Can be
+            individual JWK or JWK set.
+        algorithms (str or list): Valid algorithms that should be used to verify the JWS.
+
+    Returns:
+        str: The str representation of the payload, assuming the signature is valid.
+
+    Raises:
+        JWSError: If there is an exception verifying a token.
+
+    Examples:
+
+        >>> token = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8'
+        >>> jws.verify(token, 'secret', algorithms='HS256')
+
+    """
+
+    header, payload, signing_input, signature = _load(token)
+
+    if verify:
+        _verify_signature(signing_input, header, signature, key, algorithms)
+
+    return payload
+
+
+def get_unverified_header(token):
+    """Returns the decoded headers without verification of any kind.
+
+    Args:
+        token (str): A signed JWS to decode the headers from.
+
+    Returns:
+        dict: The dict representation of the token headers.
+
+    Raises:
+        JWSError: If there is an exception decoding the token.
+    """
+    header, claims, signing_input, signature = _load(token)
+    return header
+
+
+def get_unverified_headers(token):
+    """Returns the decoded headers without verification of any kind.
+
+    This is simply a wrapper of get_unverified_header() for backwards
+    compatibility.
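
A round trip through the public entry points above (a sketch; jws.verify returns the raw payload bytes, serialized with compact JSON separators):

from jose import jws

token = jws.sign({"a": "b"}, "secret", algorithm="HS256")
assert jws.get_unverified_header(token) == {"alg": "HS256", "typ": "JWT"}
assert jws.verify(token, "secret", algorithms=["HS256"]) == b'{"a":"b"}'
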
+ + Args: + token (str): A signed JWS to decode the headers from. + + Returns: + dict: The dict representation of the token headers. + + Raises: + JWSError: If there is an exception decoding the token. + """ + return get_unverified_header(token) + + +def get_unverified_claims(token): + """Returns the decoded claims without verification of any kind. + + Args: + token (str): A signed JWS to decode the headers from. + + Returns: + str: The str representation of the token claims. + + Raises: + JWSError: If there is an exception decoding the token. + """ + header, claims, signing_input, signature = _load(token) + return claims + + +def _encode_header(algorithm, additional_headers=None): + header = {"typ": "JWT", "alg": algorithm} + + if additional_headers: + header.update(additional_headers) + + json_header = json.dumps( + header, + separators=(",", ":"), + sort_keys=True, + ).encode("utf-8") + + return base64url_encode(json_header) + + +def _encode_payload(payload): + if isinstance(payload, Mapping): + try: + payload = json.dumps( + payload, + separators=(",", ":"), + ).encode("utf-8") + except ValueError: + pass + + return base64url_encode(payload) + + +def _sign_header_and_claims(encoded_header, encoded_claims, algorithm, key): + signing_input = b".".join([encoded_header, encoded_claims]) + try: + if not isinstance(key, Key): + key = jwk.construct(key, algorithm) + signature = key.sign(signing_input) + except Exception as e: + raise JWSError(e) + + encoded_signature = base64url_encode(signature) + + encoded_string = b".".join([encoded_header, encoded_claims, encoded_signature]) + + return encoded_string.decode("utf-8") + + +def _load(jwt): + if isinstance(jwt, str): + jwt = jwt.encode("utf-8") + try: + signing_input, crypto_segment = jwt.rsplit(b".", 1) + header_segment, claims_segment = signing_input.split(b".", 1) + header_data = base64url_decode(header_segment) + except ValueError: + raise JWSError("Not enough segments") + except (TypeError, binascii.Error): + raise JWSError("Invalid header padding") + + try: + header = json.loads(header_data.decode("utf-8")) + except ValueError as e: + raise JWSError("Invalid header string: %s" % e) + + if not isinstance(header, Mapping): + raise JWSError("Invalid header string: must be a json object") + + try: + payload = base64url_decode(claims_segment) + except (TypeError, binascii.Error): + raise JWSError("Invalid payload padding") + + try: + signature = base64url_decode(crypto_segment) + except (TypeError, binascii.Error): + raise JWSError("Invalid crypto padding") + + return (header, payload, signing_input, signature) + + +def _sig_matches_keys(keys, signing_input, signature, alg): + for key in keys: + if not isinstance(key, Key): + key = jwk.construct(key, alg) + try: + if key.verify(signing_input, signature): + return True + except Exception: + pass + return False + + +def _get_keys(key): + + if isinstance(key, Key): + return (key,) + + try: + key = json.loads(key, parse_int=str, parse_float=str) + except Exception: + pass + + if isinstance(key, Mapping): + if "keys" in key: + # JWK Set per RFC 7517 + return key["keys"] + elif "kty" in key: + # Individual JWK per RFC 7517 + return (key,) + else: + # Some other mapping. Firebase uses just dict of kid, cert pairs + values = key.values() + if values: + return values + return (key,) + + # Iterable but not text or mapping => list- or tuple-like + elif isinstance(key, Iterable) and not (isinstance(key, str) or isinstance(key, bytes)): + return key + + # Scalar value, wrap in tuple. 
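+ # Editor's note -- a sketch of the input shapes this helper accepts
+ # (hypothetical values):
+ #     _get_keys('secret')                 -> ('secret',)
+ #     _get_keys({'kty': 'oct', 'k': 'x'}) -> ({'kty': 'oct', 'k': 'x'},)
+ #     _get_keys({'keys': [k1, k2]})       -> [k1, k2]
+ #     _get_keys([k1, k2])                 -> [k1, k2]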
+ else: + return (key,) + + +def _verify_signature(signing_input, header, signature, key="", algorithms=None): + + alg = header.get("alg") + if not alg: + raise JWSError("No algorithm was specified in the JWS header.") + + if algorithms is not None and alg not in algorithms: + raise JWSError("The specified alg value is not allowed") + + keys = _get_keys(key) + try: + if not _sig_matches_keys(keys, signing_input, signature, alg): + raise JWSSignatureError() + except JWSSignatureError: + raise JWSError("Signature verification failed.") + except JWSError: + raise JWSError("Invalid or unsupported algorithm: %s" % alg) diff --git a/venv/lib/python3.10/site-packages/jose/jwt.py b/venv/lib/python3.10/site-packages/jose/jwt.py new file mode 100644 index 0000000..3f2142e --- /dev/null +++ b/venv/lib/python3.10/site-packages/jose/jwt.py @@ -0,0 +1,496 @@ +import json +from calendar import timegm +from collections.abc import Mapping +from datetime import datetime, timedelta + +from jose import jws + +from .constants import ALGORITHMS +from .exceptions import ExpiredSignatureError, JWSError, JWTClaimsError, JWTError +from .utils import calculate_at_hash, timedelta_total_seconds + + +def encode(claims, key, algorithm=ALGORITHMS.HS256, headers=None, access_token=None): + """Encodes a claims set and returns a JWT string. + + JWTs are JWS signed objects with a few reserved claims. + + Args: + claims (dict): A claims set to sign + key (str or dict): The key to use for signing the claim set. Can be + individual JWK or JWK set. + algorithm (str, optional): The algorithm to use for signing + the claims. Defaults to HS256. + headers (dict, optional): A set of headers that will be added to + the default headers. Any headers that are added as additional + headers will override the default headers. + access_token (str, optional): If present, the 'at_hash' claim will + be calculated and added to the claims present in the 'claims' + parameter. + + Returns: + str: The string representation of the header, claims, and signature. + + Raises: + JWTError: If there is an error encoding the claims. + + Examples: + + >>> jwt.encode({'a': 'b'}, 'secret', algorithm='HS256') + 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' + + """ + + for time_claim in ["exp", "iat", "nbf"]: + + # Convert datetime to an intDate value in known time-format claims + if isinstance(claims.get(time_claim), datetime): + claims[time_claim] = timegm(claims[time_claim].utctimetuple()) + + if access_token: + claims["at_hash"] = calculate_at_hash(access_token, ALGORITHMS.HASHES[algorithm]) + + return jws.sign(claims, key, headers=headers, algorithm=algorithm) + + +def decode(token, key, algorithms=None, options=None, audience=None, issuer=None, subject=None, access_token=None): + """Verifies a JWT string's signature and validates reserved claims. + + Args: + token (str): A signed JWS to be verified. + key (str or dict): A key to attempt to verify the payload with. Can be + individual JWK or JWK set. + algorithms (str or list): Valid algorithms that should be used to verify the JWS. + audience (str): The intended audience of the token. If the "aud" claim is + included in the claim set, then the audience must be included and must equal + the provided claim. + issuer (str or iterable): Acceptable value(s) for the issuer of the token. + If the "iss" claim is included in the claim set, then the issuer must be + given and the claim in the token must be among the acceptable values.
+ subject (str): The subject of the token. If the "sub" claim is + included in the claim set, then the subject must be included and must equal + the provided claim. + access_token (str): An access token string. If the "at_hash" claim is included in the + claim set, then the access_token must be included, and it must match + the "at_hash" claim. + options (dict): A dictionary of options for skipping validation steps. + + defaults = { + 'verify_signature': True, + 'verify_aud': True, + 'verify_iat': True, + 'verify_exp': True, + 'verify_nbf': True, + 'verify_iss': True, + 'verify_sub': True, + 'verify_jti': True, + 'verify_at_hash': True, + 'require_aud': False, + 'require_iat': False, + 'require_exp': False, + 'require_nbf': False, + 'require_iss': False, + 'require_sub': False, + 'require_jti': False, + 'require_at_hash': False, + 'leeway': 0, + } + + Returns: + dict: The dict representation of the claims set, assuming the signature is valid + and all requested data validation passes. + + Raises: + JWTError: If the signature is invalid in any way. + ExpiredSignatureError: If the signature has expired. + JWTClaimsError: If any claim is invalid in any way. + + Examples: + + >>> payload = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' + >>> jwt.decode(payload, 'secret', algorithms='HS256') + + """ + + defaults = { + "verify_signature": True, + "verify_aud": True, + "verify_iat": True, + "verify_exp": True, + "verify_nbf": True, + "verify_iss": True, + "verify_sub": True, + "verify_jti": True, + "verify_at_hash": True, + "require_aud": False, + "require_iat": False, + "require_exp": False, + "require_nbf": False, + "require_iss": False, + "require_sub": False, + "require_jti": False, + "require_at_hash": False, + "leeway": 0, + } + + if options: + defaults.update(options) + + verify_signature = defaults.get("verify_signature", True) + + try: + payload = jws.verify(token, key, algorithms, verify=verify_signature) + except JWSError as e: + raise JWTError(e) + + # Needed for at_hash verification + algorithm = jws.get_unverified_header(token)["alg"] + + try: + claims = json.loads(payload.decode("utf-8")) + except ValueError as e: + raise JWTError("Invalid payload string: %s" % e) + + if not isinstance(claims, Mapping): + raise JWTError("Invalid payload string: must be a json object") + + _validate_claims( + claims, + audience=audience, + issuer=issuer, + subject=subject, + algorithm=algorithm, + access_token=access_token, + options=defaults, + ) + + return claims + + +def get_unverified_header(token): + """Returns the decoded headers without verification of any kind. + + Args: + token (str): A signed JWT to decode the headers from. + + Returns: + dict: The dict representation of the token headers. + + Raises: + JWTError: If there is an exception decoding the token. + """ + try: + headers = jws.get_unverified_headers(token) + except Exception: + raise JWTError("Error decoding token headers.") + + return headers + + +def get_unverified_headers(token): + """Returns the decoded headers without verification of any kind. + + This is simply a wrapper of get_unverified_header() for backwards + compatibility. + + Args: + token (str): A signed JWT to decode the headers from. + + Returns: + dict: The dict representation of the token headers. + + Raises: + JWTError: If there is an exception decoding the token. + """ + return get_unverified_header(token) + + +def get_unverified_claims(token): + """Returns the decoded claims without verification of any kind. 
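+ Editor's sketch (illustrative):
+
+ >>> token = encode({'a': 'b'}, 'secret', algorithm='HS256')
+ >>> get_unverified_claims(token)
+ {'a': 'b'}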
+ + Args: + token (str): A signed JWT to decode the headers from. + + Returns: + dict: The dict representation of the token claims. + + Raises: + JWTError: If there is an exception decoding the token. + """ + try: + claims = jws.get_unverified_claims(token) + except Exception: + raise JWTError("Error decoding token claims.") + + try: + claims = json.loads(claims.decode("utf-8")) + except ValueError as e: + raise JWTError("Invalid claims string: %s" % e) + + if not isinstance(claims, Mapping): + raise JWTError("Invalid claims string: must be a json object") + + return claims + + +def _validate_iat(claims): + """Validates that the 'iat' claim is valid. + + The "iat" (issued at) claim identifies the time at which the JWT was + issued. This claim can be used to determine the age of the JWT. Its + value MUST be a number containing a NumericDate value. Use of this + claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + """ + + if "iat" not in claims: + return + + try: + int(claims["iat"]) + except ValueError: + raise JWTClaimsError("Issued At claim (iat) must be an integer.") + + +def _validate_nbf(claims, leeway=0): + """Validates that the 'nbf' claim is valid. + + The "nbf" (not before) claim identifies the time before which the JWT + MUST NOT be accepted for processing. The processing of the "nbf" + claim requires that the current date/time MUST be after or equal to + the not-before date/time listed in the "nbf" claim. Implementers MAY + provide for some small leeway, usually no more than a few minutes, to + account for clock skew. Its value MUST be a number containing a + NumericDate value. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + leeway (int): The number of seconds of skew that is allowed. + """ + + if "nbf" not in claims: + return + + try: + nbf = int(claims["nbf"]) + except ValueError: + raise JWTClaimsError("Not Before claim (nbf) must be an integer.") + + now = timegm(datetime.utcnow().utctimetuple()) + + if nbf > (now + leeway): + raise JWTClaimsError("The token is not yet valid (nbf)") + + +def _validate_exp(claims, leeway=0): + """Validates that the 'exp' claim is valid. + + The "exp" (expiration time) claim identifies the expiration time on + or after which the JWT MUST NOT be accepted for processing. The + processing of the "exp" claim requires that the current date/time + MUST be before the expiration date/time listed in the "exp" claim. + Implementers MAY provide for some small leeway, usually no more than + a few minutes, to account for clock skew. Its value MUST be a number + containing a NumericDate value. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + leeway (int): The number of seconds of skew that is allowed. + """ + + if "exp" not in claims: + return + + try: + exp = int(claims["exp"]) + except ValueError: + raise JWTClaimsError("Expiration Time claim (exp) must be an integer.") + + now = timegm(datetime.utcnow().utctimetuple()) + + if exp < (now - leeway): + raise ExpiredSignatureError("Signature has expired.") + + +def _validate_aud(claims, audience=None): + """Validates that the 'aud' claim is valid. + + The "aud" (audience) claim identifies the recipients that the JWT is + intended for. Each principal intended to process the JWT MUST + identify itself with a value in the audience claim. 
If the principal + processing the claim does not identify itself with a value in the + "aud" claim when this claim is present, then the JWT MUST be + rejected. In the general case, the "aud" value is an array of case- + sensitive strings, each containing a StringOrURI value. In the + special case when the JWT has one audience, the "aud" value MAY be a + single case-sensitive string containing a StringOrURI value. The + interpretation of audience values is generally application specific. + Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + audience (str): The audience that is verifying the token. + """ + + if "aud" not in claims: + # if audience: + # raise JWTError('Audience claim expected, but not in claims') + return + + audience_claims = claims["aud"] + if isinstance(audience_claims, str): + audience_claims = [audience_claims] + if not isinstance(audience_claims, list): + raise JWTClaimsError("Invalid claim format in token") + if any(not isinstance(c, str) for c in audience_claims): + raise JWTClaimsError("Invalid claim format in token") + if audience not in audience_claims: + raise JWTClaimsError("Invalid audience") + + +def _validate_iss(claims, issuer=None): + """Validates that the 'iss' claim is valid. + + The "iss" (issuer) claim identifies the principal that issued the + JWT. The processing of this claim is generally application specific. + The "iss" value is a case-sensitive string containing a StringOrURI + value. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + issuer (str or iterable): Acceptable value(s) for the issuer that + signed the token. + """ + + if issuer is not None: + if isinstance(issuer, str): + issuer = (issuer,) + if claims.get("iss") not in issuer: + raise JWTClaimsError("Invalid issuer") + + +def _validate_sub(claims, subject=None): + """Validates that the 'sub' claim is valid. + + The "sub" (subject) claim identifies the principal that is the + subject of the JWT. The claims in a JWT are normally statements + about the subject. The subject value MUST either be scoped to be + locally unique in the context of the issuer or be globally unique. + The processing of this claim is generally application specific. The + "sub" value is a case-sensitive string containing a StringOrURI + value. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + subject (str): The subject of the token. + """ + + if "sub" not in claims: + return + + if not isinstance(claims["sub"], str): + raise JWTClaimsError("Subject must be a string.") + + if subject is not None: + if claims.get("sub") != subject: + raise JWTClaimsError("Invalid subject") + + +def _validate_jti(claims): + """Validates that the 'jti' claim is valid. + + The "jti" (JWT ID) claim provides a unique identifier for the JWT. + The identifier value MUST be assigned in a manner that ensures that + there is a negligible probability that the same value will be + accidentally assigned to a different data object; if the application + uses multiple issuers, collisions MUST be prevented among values + produced by different issuers as well. The "jti" claim can be used + to prevent the JWT from being replayed. The "jti" value is a case- + sensitive string. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. 
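+ Editor's note -- the numeric time claims validated above (iat, nbf,
+ exp) share the same arithmetic; a worked example for exp with leeway
+ (hypothetical values):
+
+     now = 1_700_000_000   # timegm(datetime.utcnow().utctimetuple())
+     exp = now - 30        # expired 30 seconds ago
+     exp < (now - 0)       # True  -> ExpiredSignatureError with no leeway
+     exp < (now - 60)      # False -> still accepted with 60s leeway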
+ """ + if "jti" not in claims: + return + + if not isinstance(claims["jti"], str): + raise JWTClaimsError("JWT ID must be a string.") + + +def _validate_at_hash(claims, access_token, algorithm): + """ + Validates that the 'at_hash' is valid. + + Its value is the base64url encoding of the left-most half of the hash + of the octets of the ASCII representation of the access_token value, + where the hash algorithm used is the hash algorithm used in the alg + Header Parameter of the ID Token's JOSE Header. For instance, if the + alg is RS256, hash the access_token value with SHA-256, then take the + left-most 128 bits and base64url encode them. The at_hash value is a + case sensitive string. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + access_token (str): The access token returned by the OpenID Provider. + algorithm (str): The algorithm used to sign the JWT, as specified by + the token headers. + """ + if "at_hash" not in claims: + return + + if not access_token: + msg = "No access_token provided to compare against at_hash claim." + raise JWTClaimsError(msg) + + try: + expected_hash = calculate_at_hash(access_token, ALGORITHMS.HASHES[algorithm]) + except (TypeError, ValueError): + msg = "Unable to calculate at_hash to verify against token claims." + raise JWTClaimsError(msg) + + if claims["at_hash"] != expected_hash: + raise JWTClaimsError("at_hash claim does not match access_token.") + + +def _validate_claims(claims, audience=None, issuer=None, subject=None, algorithm=None, access_token=None, options=None): + + leeway = options.get("leeway", 0) + + if isinstance(leeway, timedelta): + leeway = timedelta_total_seconds(leeway) + required_claims = [e[len("require_") :] for e in options.keys() if e.startswith("require_") and options[e]] + for require_claim in required_claims: + if require_claim not in claims: + raise JWTError('missing required key "%s" among claims' % require_claim) + else: + options["verify_" + require_claim] = True # override verify when required + + if not isinstance(audience, ((str,), type(None))): + raise JWTError("audience must be a string or None") + + if options.get("verify_iat"): + _validate_iat(claims) + + if options.get("verify_nbf"): + _validate_nbf(claims, leeway=leeway) + + if options.get("verify_exp"): + _validate_exp(claims, leeway=leeway) + + if options.get("verify_aud"): + _validate_aud(claims, audience=audience) + + if options.get("verify_iss"): + _validate_iss(claims, issuer=issuer) + + if options.get("verify_sub"): + _validate_sub(claims, subject=subject) + + if options.get("verify_jti"): + _validate_jti(claims) + + if options.get("verify_at_hash"): + _validate_at_hash(claims, access_token, algorithm) diff --git a/venv/lib/python3.10/site-packages/jose/utils.py b/venv/lib/python3.10/site-packages/jose/utils.py new file mode 100644 index 0000000..fcef885 --- /dev/null +++ b/venv/lib/python3.10/site-packages/jose/utils.py @@ -0,0 +1,108 @@ +import base64 +import struct + +# Piggyback of the backends implementation of the function that converts a long +# to a bytes stream. Some plumbing is necessary to have the signatures match. 
+try: + from cryptography.utils import int_to_bytes as _long_to_bytes + + def long_to_bytes(n, blocksize=0): + return _long_to_bytes(n, blocksize or None) + + +except ImportError: + from ecdsa.ecdsa import int_to_string as _long_to_bytes + + def long_to_bytes(n, blocksize=0): + ret = _long_to_bytes(n) + if blocksize == 0: + return ret + else: + assert len(ret) <= blocksize + padding = blocksize - len(ret) + return b"\x00" * padding + ret + + +def long_to_base64(data, size=0): + return base64.urlsafe_b64encode(long_to_bytes(data, size)).strip(b"=") + + +def int_arr_to_long(arr): + return int("".join(["%02x" % byte for byte in arr]), 16) + + +def base64_to_long(data): + if isinstance(data, str): + data = data.encode("ascii") + + # urlsafe_b64decode will happily convert b64encoded data + _d = base64.urlsafe_b64decode(bytes(data) + b"==") + return int_arr_to_long(struct.unpack("%sB" % len(_d), _d)) + + +def calculate_at_hash(access_token, hash_alg): + """Helper method for calculating an access token + hash, as described in http://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken + + Its value is the base64url encoding of the left-most half of the hash of the octets + of the ASCII representation of the access_token value, where the hash algorithm + used is the hash algorithm used in the alg Header Parameter of the ID Token's JOSE + Header. For instance, if the alg is RS256, hash the access_token value with SHA-256, + then take the left-most 128 bits and base64url encode them. The at_hash value is a + case sensitive string. + + Args: + access_token (str): An access token string. + hash_alg (callable): A callable returning a hash object, e.g. hashlib.sha256 + + """ + hash_digest = hash_alg(access_token.encode("utf-8")).digest() + cut_at = int(len(hash_digest) / 2) + truncated = hash_digest[:cut_at] + at_hash = base64url_encode(truncated) + return at_hash.decode("utf-8") + + +def base64url_decode(input): + """Helper method to base64url_decode a string. + + Args: + input (str): A base64url_encoded string to decode. + + """ + rem = len(input) % 4 + + if rem > 0: + input += b"=" * (4 - rem) + + return base64.urlsafe_b64decode(input) + + +def base64url_encode(input): + """Helper method to base64url_encode a string. + + Args: + input (str): A base64url_encoded string to encode. + + """ + return base64.urlsafe_b64encode(input).replace(b"=", b"") + + +def timedelta_total_seconds(delta): + """Helper method to determine the total number of seconds + from a timedelta. + + Args: + delta (timedelta): A timedelta to convert to seconds. 
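+    Editor's note (worked example): timedelta(days=1, seconds=30) ->
+    1 * 24 * 60 * 60 + 30 = 86430 seconds.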
+ """ + return delta.days * 24 * 60 * 60 + delta.seconds + + +def ensure_binary(s): + """Coerce **s** to bytes.""" + + if isinstance(s, bytes): + return s + if isinstance(s, str): + return s.encode("utf-8", "strict") + raise TypeError(f"not expecting type '{type(s)}'") diff --git a/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/LICENSE.txt b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..306c973 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright (c) 2022 Ali-Akber Saifee + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/METADATA new file mode 100644 index 0000000..2f8483e --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/METADATA @@ -0,0 +1,177 @@ +Metadata-Version: 2.1 +Name: limits +Version: 2.8.0 +Summary: Rate limiting utilities +Home-page: https://limits.readthedocs.org +Author: Ali-Akber Saifee +Author-email: ali@indydevs.org +License: MIT +Project-URL: Source, https://github.com/alisaifee/limits +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: OS Independent +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.7 +License-File: LICENSE.txt +Requires-Dist: deprecated (>=1.2) +Requires-Dist: setuptools +Requires-Dist: packaging (<23,>=21) +Requires-Dist: typing-extensions +Provides-Extra: all +Requires-Dist: redis (<5.0.0,>3) ; extra == 'all' +Requires-Dist: redis (>=4.2.0) ; extra == 'all' +Requires-Dist: pymemcache (<5.0.0,>3) ; extra == 'all' +Requires-Dist: pymongo (<5,>3) ; extra == 'all' +Requires-Dist: motor (<4,>=2.5) ; extra == 'all' +Requires-Dist: emcache (>=0.6.1) ; (python_version < "3.11") and extra == 'all' +Requires-Dist: coredis (<5,>=3.4.0) ; (python_version > "3.7") and extra == 'all' +Provides-Extra: async-memcached +Requires-Dist: emcache (>=0.6.1) ; (python_version < "3.11") and extra == 'async-memcached' +Provides-Extra: async-mongodb +Requires-Dist: motor (<4,>=2.5) ; extra == 'async-mongodb' +Provides-Extra: async-redis +Requires-Dist: coredis (<5,>=3.4.0) ; (python_version > "3.7") and extra == 'async-redis' +Provides-Extra: memcached +Requires-Dist: pymemcache (<5.0.0,>3) ; extra == 'memcached' +Provides-Extra: mongodb +Requires-Dist: pymongo (<5,>3) ; extra == 'mongodb' +Provides-Extra: redis +Requires-Dist: redis (<5.0.0,>3) ; extra == 'redis' +Provides-Extra: rediscluster +Requires-Dist: redis (>=4.2.0) ; extra == 'rediscluster' + +.. |ci| image:: https://github.com/alisaifee/limits/workflows/CI/badge.svg?branch=master + :target: https://github.com/alisaifee/limits/actions?query=branch%3Amaster+workflow%3ACI +.. |codecov| image:: https://codecov.io/gh/alisaifee/limits/branch/master/graph/badge.svg + :target: https://codecov.io/gh/alisaifee/limits +.. |pypi| image:: https://img.shields.io/pypi/v/limits.svg?style=flat-square + :target: https://pypi.python.org/pypi/limits +.. |pypi-versions| image:: https://img.shields.io/pypi/pyversions/limits?style=flat-square + :target: https://pypi.python.org/pypi/limits +.. |license| image:: https://img.shields.io/pypi/l/limits.svg?style=flat-square + :target: https://pypi.python.org/pypi/limits +.. 
|docs| image:: https://readthedocs.org/projects/limits/badge/?version=latest + :target: https://limits.readthedocs.org + +limits +------ +|docs| |ci| |codecov| |pypi| |pypi-versions| |license| + + +**limits** is a python library to perform rate limiting with commonly used storage backends (Redis, Memcached & MongoDB). + +---- + +Sponsored by Zuplo - fully-managed, programmable API Management platform. +Add rate limiting and more to your public API in minutes, try it at `zuplo.com `_ + +---- + +Supported Strategies +==================== +`Fixed Window `_ + This strategy resets at a fixed interval (start of minute, hour, day etc). + For example, given a rate limit of ``10/minute`` the strategy will: + + - Allow 10 requests between ``00:01:00`` and ``00:02:00`` + - Allow 10 requests at ``00:00:59`` and 10 more requests at ``00:01:00`` + + +`Fixed Window (Elastic) `_ + Identical to Fixed window, except every breach of rate limit results in an extension + to the time out. For example a rate limit of `1/minute` hit twice within a minute will + result in a lock-out for two minutes. + +`Moving Window `_ + Sliding window strategy enforces a rate limit of N/(m time units) + on the **last m** time units at the second granularity. + + For example, with a rate limit of ``10/minute``: + + - Allow 9 requests that arrive at ``00:00:59`` + - Allow another request that arrives at ``00:01:00`` + - Reject the request that arrives at ``00:01:01`` + +Storage backends +================ + +- `Redis `_ +- `Memcached `_ +- `In-Memory `_ +- `MongoDB `_ + +Dive right in +============= + +Initialize the storage backend + +.. code-block:: python + + from limits import storage + memory_storage = storage.MemoryStorage() + # or memcached + memcached_storage = storage.MemcachedStorage("memcached://localhost:11211") + # or redis + redis_storage = storage.RedisStorage("redis://localhost:6379") + # or leave it to fate + some_storage = storage.storage.from_string(fate) + +Initialize a rate limiter with the Moving Window Strategy + +.. code-block:: python + + from limits import strategies + moving_window = strategies.MovingWindowRateLimiter(memory_storage) + + +Initialize a rate limit + +.. code-block:: python + + from limits import parse + one_per_minute = parse("1/minute") + +Initialize a rate limit explicitly + +.. code-block:: python + + from limits import RateLimitItemPerSecond + one_per_second = RateLimitItemPerSecond(1, 1) + +Test the limits + +.. code-block:: python + + assert True == moving_window.hit(one_per_minute, "test_namespace", "foo") + assert False == moving_window.hit(one_per_minute, "test_namespace", "foo") + assert True == moving_window.hit(one_per_minute, "test_namespace", "bar") + + assert True == moving_window.hit(one_per_second, "test_namespace", "foo") + assert False == moving_window.hit(one_per_second, "test_namespace", "foo") + time.sleep(1) + assert True == moving_window.hit(one_per_second, "test_namespace", "foo") + +Check specific limits without hitting them + +.. 
code-block:: python + + assert True == moving_window.hit(one_per_second, "test_namespace", "foo") + while not moving_window.test(one_per_second, "test_namespace", "foo"): + time.sleep(0.01) + assert True == moving_window.hit(one_per_second, "test_namespace", "foo") + +Links +===== + +* `Documentation `_ +* `Changelog `_ + diff --git a/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/RECORD new file mode 100644 index 0000000..4b29c90 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/RECORD @@ -0,0 +1,64 @@ +limits-2.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +limits-2.8.0.dist-info/LICENSE.txt,sha256=92DDW6sU3SOesVpQcApB264W5u0Fo4LBgeU6FH5NO2E,1061 +limits-2.8.0.dist-info/METADATA,sha256=kmI_Iibh5uq4qQnpnYr1t39TJhku8LxStDS1QwoLh7s,6836 +limits-2.8.0.dist-info/RECORD,, +limits-2.8.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +limits-2.8.0.dist-info/top_level.txt,sha256=C7g5ahldPoU2s6iWTaJayUrbGmPK1d6e9t5Nn0vQ2jM,7 +limits/__init__.py,sha256=zxxUNaVjpDuD441Gkr75M6IJbyFLDKPg6UH1ZXEYv8M,696 +limits/__pycache__/__init__.cpython-310.pyc,, +limits/__pycache__/_version.cpython-310.pyc,, +limits/__pycache__/errors.cpython-310.pyc,, +limits/__pycache__/limits.cpython-310.pyc,, +limits/__pycache__/strategies.cpython-310.pyc,, +limits/__pycache__/typing.cpython-310.pyc,, +limits/__pycache__/util.cpython-310.pyc,, +limits/__pycache__/version.cpython-310.pyc,, +limits/_version.py,sha256=4MaFAXszOsvVttErPRIw9iXY8aq0U7HIdoZhLLdsaF4,497 +limits/aio/__init__.py,sha256=IOetunwQy1c5GefzitK8lewbTzHGiE-kmE9NlqSdr3U,82 +limits/aio/__pycache__/__init__.cpython-310.pyc,, +limits/aio/__pycache__/strategies.cpython-310.pyc,, +limits/aio/storage/__init__.py,sha256=tHvZ_fENBMRJKvUV2CDLQnmhjb_m6waak8ot_ramVKA,546 +limits/aio/storage/__pycache__/__init__.cpython-310.pyc,, +limits/aio/storage/__pycache__/base.cpython-310.pyc,, +limits/aio/storage/__pycache__/memcached.cpython-310.pyc,, +limits/aio/storage/__pycache__/memory.cpython-310.pyc,, +limits/aio/storage/__pycache__/mongodb.cpython-310.pyc,, +limits/aio/storage/__pycache__/redis.cpython-310.pyc,, +limits/aio/storage/base.py,sha256=gsy_91lvm53AoAKi1plaegcEOSZXK3OiMP0mxA6lPAw,2941 +limits/aio/storage/memcached.py,sha256=aR7f5NY-mXtFFjphbaxoL7z1cAHZuRYjpGUVOktoI4A,4296 +limits/aio/storage/memory.py,sha256=ng-mR5Th5IpdARxkE7k3qgNL33muhDOjdxvplcItnYU,5521 +limits/aio/storage/mongodb.py,sha256=JOeDrhthmZ8Rxsr951URWLeqSSyheJX-KsihbbXSK_Y,9004 +limits/aio/storage/redis.py,sha256=0UpUTHl7vSjpB5pB1vVcuPX4V00LsZT9vqWunLlCxTs,14561 +limits/aio/strategies.py,sha256=9HJCv0PcjBGOQ-GeUZmxqw1GRgeVkS52GQ1DTdHKtZQ,6477 +limits/errors.py,sha256=EM0USAHEextLk4cXra_5zsTqe7vc3uLezoS_Re9Oeh8,150 +limits/limits.py,sha256=lwQnA5wegkW_AXtplOH3tLuQ1LByMX9hqHJquYVYdTs,4943 +limits/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +limits/resources/redis/lua_scripts/acquire_moving_window.lua,sha256=n0X9gGdIIb3eDmXHt4g66fEfT6n5JttwBJAcvx_Y2KY,492 +limits/resources/redis/lua_scripts/clear_keys.lua,sha256=zU0cVfLGmapRQF9x9u0GclapM_IB2pJLszNzVQ1QRK4,184 +limits/resources/redis/lua_scripts/gcra_consume.lua,sha256=bqTDE6_7LYqvqJCSo7mmJ_tGedL9CyWMwwBNXFuHdMk,1251 +limits/resources/redis/lua_scripts/incr_expire.lua,sha256=Uq9NcrrcDI-F87TDAJexoSJn2SDgeXIUEYozCp9S3oA,195 +limits/resources/redis/lua_scripts/moving_window.lua,sha256=ir0SkuRVnrqkVSFNIuedTV_KW6zG70Z56u6-_FpR_20,352 
+limits/storage/__init__.py,sha256=C6ZwMetu4_7C46eZcs0Sfqdn2jelU5mT_hP5arH9F98,2543 +limits/storage/__pycache__/__init__.cpython-310.pyc,, +limits/storage/__pycache__/base.cpython-310.pyc,, +limits/storage/__pycache__/gae_memcached.cpython-310.pyc,, +limits/storage/__pycache__/memcached.cpython-310.pyc,, +limits/storage/__pycache__/memory.cpython-310.pyc,, +limits/storage/__pycache__/mongodb.cpython-310.pyc,, +limits/storage/__pycache__/redis.cpython-310.pyc,, +limits/storage/__pycache__/redis_cluster.cpython-310.pyc,, +limits/storage/__pycache__/redis_sentinel.cpython-310.pyc,, +limits/storage/__pycache__/registry.cpython-310.pyc,, +limits/storage/base.py,sha256=-JV-zAkss7pOETZyPYjo8ZZqTMGs-DPqgz_gcfArXfs,2818 +limits/storage/gae_memcached.py,sha256=ImPXzJQ6aWcnHn8z9t4vo4lS_16V24YDyXnLj0lYcwk,2312 +limits/storage/memcached.py,sha256=qMvRIEtRRzZXSgcZSUTBgUlBDeNOpIr_gDTV2r5SPak,6005 +limits/storage/memory.py,sha256=LxGdPfhBq2CvZq5Jl5WYFqYKh0BtCKpdrqDdKwATR68,5167 +limits/storage/mongodb.py,sha256=a_ccM4Y9pnqqZgitJvNkLboUnqYmb1U7jXQXtikmW4o,7841 +limits/storage/redis.py,sha256=nysRkOIK9pKTC1p0mBpIu5wHjmihLsFbwOx8nfFMpaI,7560 +limits/storage/redis_cluster.py,sha256=R7ioP9jeRN8WOuoxhUb9UJUHu1C-5ujDP-95ZiV2ebc,5029 +limits/storage/redis_sentinel.py,sha256=fbTHyxeNsnGG4ll3hMDejE2C2DqWPpKwE3JklUY9pHA,3835 +limits/storage/registry.py,sha256=xcBcxuu6srqmoS4WqDpkCXnRLB19ctH98v21P8S9kS8,708 +limits/strategies.py,sha256=GcO2daLq9qmBhAll7RUPNsO_mrXp0-I9WTgestvSfj4,6729 +limits/typing.py,sha256=wCqVGECCAxcIQwOtQUIZ1xsiJoC5JeAopz4NobIdgjM,2928 +limits/util.py,sha256=aYd3TiQr_qS-K6EX_FpKhYNmHQOmp3IOgJ1_AqgrRSA,5438 +limits/version.py,sha256=YwkF3dtq1KGzvmL3iVGctA8NNtGlK_0arrzZkZGVjUs,47 diff --git a/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/WHEEL new file mode 100644 index 0000000..57e3d84 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/top_level.txt new file mode 100644 index 0000000..3b34508 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits-2.8.0.dist-info/top_level.txt @@ -0,0 +1 @@ +limits diff --git a/venv/lib/python3.10/site-packages/limits/__init__.py b/venv/lib/python3.10/site-packages/limits/__init__.py new file mode 100644 index 0000000..955b9d9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/__init__.py @@ -0,0 +1,32 @@ +""" +Rate limiting with commonly used storage backends +""" + +from . 
import _version, aio, storage, strategies +from .limits import ( + RateLimitItem, + RateLimitItemPerDay, + RateLimitItemPerHour, + RateLimitItemPerMinute, + RateLimitItemPerMonth, + RateLimitItemPerSecond, + RateLimitItemPerYear, +) +from .util import parse, parse_many + +__all__ = [ + "RateLimitItem", + "RateLimitItemPerYear", + "RateLimitItemPerMonth", + "RateLimitItemPerDay", + "RateLimitItemPerHour", + "RateLimitItemPerMinute", + "RateLimitItemPerSecond", + "aio", + "storage", + "strategies", + "parse", + "parse_many", +] + +__version__ = _version.get_versions()["version"] # type: ignore diff --git a/venv/lib/python3.10/site-packages/limits/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..ee2fe6b Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/__pycache__/_version.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/__pycache__/_version.cpython-310.pyc new file mode 100644 index 0000000..ecbd78b Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/__pycache__/_version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/__pycache__/errors.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/__pycache__/errors.cpython-310.pyc new file mode 100644 index 0000000..a014e50 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/__pycache__/errors.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/__pycache__/limits.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/__pycache__/limits.cpython-310.pyc new file mode 100644 index 0000000..13aab5e Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/__pycache__/limits.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/__pycache__/strategies.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/__pycache__/strategies.cpython-310.pyc new file mode 100644 index 0000000..3d7abd8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/__pycache__/strategies.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/__pycache__/typing.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/__pycache__/typing.cpython-310.pyc new file mode 100644 index 0000000..83d488b Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/__pycache__/typing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/__pycache__/util.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000..8ca9ff9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/__pycache__/util.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/__pycache__/version.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/__pycache__/version.cpython-310.pyc new file mode 100644 index 0000000..f2c7e07 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/__pycache__/version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/_version.py b/venv/lib/python3.10/site-packages/limits/_version.py new file mode 100644 index 0000000..091b1fb --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/_version.py @@ -0,0 +1,21 @@ + +# This file was generated by 'versioneer.py' (0.22) from +# revision-control system data, or 
from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +import json + +version_json = ''' +{ + "date": "2022-12-23T13:41:46-0800", + "dirty": false, + "error": null, + "full-revisionid": "9ee3d6886c3a2d6fe5a89fe4c589f6ed09617f64", + "version": "2.8.0" +} +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) diff --git a/venv/lib/python3.10/site-packages/limits/aio/__init__.py b/venv/lib/python3.10/site-packages/limits/aio/__init__.py new file mode 100644 index 0000000..7514956 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/aio/__init__.py @@ -0,0 +1,6 @@ +from . import storage, strategies + +__all__ = [ + "storage", + "strategies", +] diff --git a/venv/lib/python3.10/site-packages/limits/aio/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/aio/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..39e8528 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/aio/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/aio/__pycache__/strategies.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/aio/__pycache__/strategies.cpython-310.pyc new file mode 100644 index 0000000..7e76251 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/aio/__pycache__/strategies.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/__init__.py b/venv/lib/python3.10/site-packages/limits/aio/storage/__init__.py new file mode 100644 index 0000000..5432410 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/aio/storage/__init__.py @@ -0,0 +1,21 @@ +""" +Implementations of storage backends to be used with +:class:`limits.aio.strategies.RateLimiter` strategies +""" + +from .base import MovingWindowSupport, Storage +from .memcached import MemcachedStorage +from .memory import MemoryStorage +from .mongodb import MongoDBStorage +from .redis import RedisClusterStorage, RedisSentinelStorage, RedisStorage + +__all__ = [ + "Storage", + "MovingWindowSupport", + "MemcachedStorage", + "MemoryStorage", + "MongoDBStorage", + "RedisStorage", + "RedisClusterStorage", + "RedisSentinelStorage", +] diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..935c6cd Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000..212eeed Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/base.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/memcached.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/memcached.cpython-310.pyc new file mode 100644 index 0000000..cf1fb7a Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/memcached.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/memory.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/memory.cpython-310.pyc new file mode 100644 index 0000000..a52beff Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/memory.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/mongodb.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/mongodb.cpython-310.pyc new file mode 100644 index 0000000..ccc9a4c Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/mongodb.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/redis.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/redis.cpython-310.pyc new file mode 100644 index 0000000..5355ae2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/aio/storage/__pycache__/redis.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/base.py b/venv/lib/python3.10/site-packages/limits/aio/storage/base.py new file mode 100644 index 0000000..53d04d2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/aio/storage/base.py @@ -0,0 +1,105 @@ +from abc import ABC, abstractmethod + +from deprecated.sphinx import versionadded + +from limits.storage.registry import StorageRegistry +from limits.typing import List, Optional, Tuple, Union +from limits.util import LazyDependency + + +@versionadded(version="2.1") +class Storage(LazyDependency, metaclass=StorageRegistry): + """ + Base class to extend when implementing an async storage backend. + """ + + STORAGE_SCHEME: Optional[List[str]] + """The storage schemes to register against this implementation""" + + def __init__( + self, uri: Optional[str] = None, **options: Union[float, str, bool] + ) -> None: + super().__init__() + + @abstractmethod + async def incr( + self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1 + ) -> int: + """ + increments the counter for a given rate limit key + + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param elastic_expiry: whether to keep extending the rate limit + window every hit. 
+ :param amount: the number to increment by + """ + raise NotImplementedError + + @abstractmethod + async def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + raise NotImplementedError + + @abstractmethod + async def get_expiry(self, key: str) -> int: + """ + :param key: the key to get the expiry for + """ + raise NotImplementedError + + @abstractmethod + async def check(self) -> bool: + """ + check if storage is healthy + """ + raise NotImplementedError + + @abstractmethod + async def reset(self) -> Optional[int]: + """ + reset storage to clear limits + """ + raise NotImplementedError + + @abstractmethod + async def clear(self, key: str) -> None: + """ + resets the rate limit key + + :param key: the key to clear rate limits for + """ + raise NotImplementedError + + +class MovingWindowSupport(ABC): + """ + Abstract base for storages that intend to support + the moving window strategy + """ + + async def acquire_entry( + self, key: str, limit: int, expiry: int, amount: int = 1 + ) -> bool: + """ + :param key: rate limit key to acquire an entry in + :param limit: amount of entries allowed + :param expiry: expiry of the entry + :param amount: the number of entries to acquire + """ + raise NotImplementedError + + async def get_moving_window( + self, key: str, limit: int, expiry: int + ) -> Tuple[int, int]: + """ + returns the starting point and the number of entries in the moving + window + + :param key: rate limit key + :param expiry: expiry of entry + :return: (start of window, number of acquired entries) + """ + raise NotImplementedError diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/memcached.py b/venv/lib/python3.10/site-packages/limits/aio/storage/memcached.py new file mode 100644 index 0000000..4a2c07f --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/aio/storage/memcached.py @@ -0,0 +1,136 @@ +import time +import urllib.parse + +from deprecated.sphinx import versionadded + +from limits.aio.storage.base import Storage +from limits.typing import EmcacheClientP, Optional, Union + + +@versionadded(version="2.1") +class MemcachedStorage(Storage): + """ + Rate limit storage with memcached as backend. 
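+    Editor's sketch (assumes a reachable memcached instance; illustrative
+    only, using the public API exported by this package):
+
+        from limits import parse
+        from limits.aio import storage, strategies
+
+        store = storage.MemcachedStorage("async+memcached://localhost:11211")
+        limiter = strategies.FixedWindowRateLimiter(store)
+        allowed = await limiter.hit(parse("10/minute"), "api", "user-42")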
+ + Depends on :pypi:`emcache` + """ + + STORAGE_SCHEME = ["async+memcached"] + """The storage scheme for memcached to be used in an async context""" + + DEPENDENCIES = ["emcache"] + + def __init__(self, uri: str, **options: Union[float, str, bool]) -> None: + """ + :param uri: memcached location of the form + ``async+memcached://host:port,host:port`` + :param options: all remaining keyword arguments are passed + directly to the constructor of :class:`emcache.Client` + :raise ConfigurationError: when :pypi:`emcache` is not available + """ + parsed = urllib.parse.urlparse(uri) + self.hosts = [] + + for host, port in ( + loc.split(":") for loc in parsed.netloc.strip().split(",") if loc.strip() + ): + self.hosts.append((host, int(port))) + + self._options = options + self._storage = None + super().__init__(uri, **options) + self.dependency = self.dependencies["emcache"].module + + async def get_storage(self) -> EmcacheClientP: + if not self._storage: + self._storage = await self.dependency.create_client( + [self.dependency.MemcachedHostAddress(h, p) for h, p in self.hosts], + **self._options, + ) + assert self._storage + return self._storage + + async def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + + item = await (await self.get_storage()).get(key.encode("utf-8")) + + return item and int(item.value) or 0 + + async def clear(self, key: str) -> None: + """ + :param key: the key to clear rate limits for + """ + await (await self.get_storage()).delete(key.encode("utf-8")) + + async def incr( + self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1 + ) -> int: + """ + increments the counter for a given rate limit key + + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param elastic_expiry: whether to keep extending the rate limit + window every hit. 
+ :param amount: the number to increment by + """ + storage = await self.get_storage() + limit_key = key.encode("utf-8") + expire_key = f"{key}/expires".encode() + added = True + try: + await storage.add(limit_key, f"{amount}".encode(), exptime=expiry) + except self.dependency.NotStoredStorageCommandError: + added = False + storage = await self.get_storage() + + if not added: + value = await storage.increment(limit_key, amount) or amount + + if elastic_expiry: + await storage.touch(limit_key, exptime=expiry) + await storage.set( + expire_key, + str(expiry + time.time()).encode("utf-8"), + exptime=expiry, + noreply=False, + ) + + return value + else: + await storage.set( + expire_key, + str(expiry + time.time()).encode("utf-8"), + exptime=expiry, + noreply=False, + ) + + return amount + + async def get_expiry(self, key: str) -> int: + """ + :param key: the key to get the expiry for + """ + storage = await self.get_storage() + item = await storage.get(f"{key}/expires".encode()) + + return int(item and float(item.value) or time.time()) + + async def check(self) -> bool: + """ + Check if storage is healthy by calling the ``get`` command + on the key ``limiter-check`` + """ + try: + storage = await self.get_storage() + await storage.get(b"limiter-check") + + return True + except: # noqa + return False + + async def reset(self) -> Optional[int]: + raise NotImplementedError diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/memory.py b/venv/lib/python3.10/site-packages/limits/aio/storage/memory.py new file mode 100644 index 0000000..5fb5dbb --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/aio/storage/memory.py @@ -0,0 +1,176 @@ +import asyncio +import time +from collections import Counter + +from deprecated.sphinx import versionadded + +import limits.typing +from limits.aio.storage.base import MovingWindowSupport, Storage +from limits.typing import Dict, List, Optional, Tuple + + +class LockableEntry(asyncio.Lock): + def __init__(self, expiry: int) -> None: + self.atime = time.time() + self.expiry = self.atime + expiry + super().__init__() + + +@versionadded(version="2.1") +class MemoryStorage(Storage, MovingWindowSupport): + """ + rate limit storage using :class:`collections.Counter` + as an in memory storage for fixed and elastic window strategies, + and a simple list to implement moving window strategy. 
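+    Editor's sketch (illustrative) -- the async in-memory backend is handy
+    for tests:
+
+        from limits import parse
+        from limits.aio import strategies
+        from limits.aio.storage import MemoryStorage
+
+        limiter = strategies.MovingWindowRateLimiter(MemoryStorage())
+        assert await limiter.hit(parse("1/minute"), "ns", "id")
+        assert not await limiter.hit(parse("1/minute"), "ns", "id")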
+ """ + + STORAGE_SCHEME = ["async+memory"] + """ + The storage scheme for in process memory storage for use in an + async context + """ + + def __init__(self, uri: Optional[str] = None, **_: str) -> None: + self.storage: limits.typing.Counter[str] = Counter() + self.expirations: Dict[str, float] = {} + self.events: Dict[str, List[LockableEntry]] = {} + self.timer: Optional[asyncio.Task[None]] = None + super().__init__(uri, **_) + + async def __expire_events(self) -> None: + for key in self.events.keys(): + for event in list(self.events[key]): + async with event: + if event.expiry <= time.time() and event in self.events[key]: + self.events[key].remove(event) + + for key in list(self.expirations.keys()): + if self.expirations[key] <= time.time(): + self.storage.pop(key, None) + self.expirations.pop(key, None) + + async def __schedule_expiry(self) -> None: + if not self.timer or self.timer.done(): + self.timer = asyncio.create_task(self.__expire_events()) + + async def incr( + self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1 + ) -> int: + """ + increments the counter for a given rate limit key + + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param elastic_expiry: whether to keep extending the rate limit + window every hit. + :param amount: the number to increment by + """ + await self.get(key) + await self.__schedule_expiry() + self.storage[key] += amount + + if elastic_expiry or self.storage[key] == amount: + self.expirations[key] = time.time() + expiry + + return self.storage.get(key, amount) + + async def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + + if self.expirations.get(key, 0) <= time.time(): + self.storage.pop(key, None) + self.expirations.pop(key, None) + + return self.storage.get(key, 0) + + async def clear(self, key: str) -> None: + """ + :param key: the key to clear rate limits for + """ + self.storage.pop(key, None) + self.expirations.pop(key, None) + self.events.pop(key, None) + + async def acquire_entry( + self, key: str, limit: int, expiry: int, amount: int = 1 + ) -> bool: + """ + :param key: rate limit key to acquire an entry in + :param limit: amount of entries allowed + :param expiry: expiry of the entry + :param amount: the number of entries to acquire + """ + self.events.setdefault(key, []) + await self.__schedule_expiry() + timestamp = time.time() + try: + entry: Optional[LockableEntry] = self.events[key][limit - amount] + except IndexError: + entry = None + + if entry and entry.atime >= timestamp - expiry: + return False + else: + self.events[key][:0] = [LockableEntry(expiry) for _ in range(amount)] + + return True + + async def get_expiry(self, key: str) -> int: + """ + :param key: the key to get the expiry for + """ + + return int(self.expirations.get(key, time.time())) + + async def get_num_acquired(self, key: str, expiry: int) -> int: + """ + returns the number of entries already acquired + + :param key: rate limit key to acquire an entry in + :param expiry: expiry of the entry + """ + timestamp = time.time() + + return ( + len([k for k in self.events[key] if k.atime >= timestamp - expiry]) + if self.events.get(key) + else 0 + ) + + # FIXME: arg limit is not used + async def get_moving_window( + self, key: str, limit: int, expiry: int + ) -> Tuple[int, int]: + """ + returns the starting point and the number of entries in the moving + window + + :param key: rate limit key + :param expiry: expiry of entry + :return: (start of window, number 
of acquired entries) + """ + timestamp = time.time() + acquired = await self.get_num_acquired(key, expiry) + + for item in self.events.get(key, []): + if item.atime >= timestamp - expiry: + return int(item.atime), acquired + + return int(timestamp), acquired + + async def check(self) -> bool: + """ + check if storage is healthy + """ + + return True + + async def reset(self) -> Optional[int]: + num_items = len(self.storage) + self.storage.clear() + self.expirations.clear() + self.events.clear() + + return num_items diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/mongodb.py b/venv/lib/python3.10/site-packages/limits/aio/storage/mongodb.py new file mode 100644 index 0000000..c819757 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/aio/storage/mongodb.py @@ -0,0 +1,267 @@ +from __future__ import annotations + +import asyncio +import calendar +import datetime +import time +from typing import Any + +from deprecated.sphinx import versionadded + +from limits.aio.storage.base import MovingWindowSupport, Storage +from limits.typing import Dict, Optional, ParamSpec, Tuple, TypeVar, Union + +P = ParamSpec("P") +R = TypeVar("R") + + +@versionadded(version="2.1") +class MongoDBStorage(Storage, MovingWindowSupport): + """ + Rate limit storage with MongoDB as backend. + + Depends on :pypi:`motor` + """ + + STORAGE_SCHEME = ["async+mongodb", "async+mongodb+srv"] + """ + The storage scheme for MongoDB for use in an async context + """ + + DEFAULT_OPTIONS: Dict[str, Union[float, str, bool]] = { + "serverSelectionTimeoutMS": 1000, + "socketTimeoutMS": 1000, + "connectTimeoutMS": 1000, + } + "Default options passed to :class:`~motor.motor_asyncio.AsyncIOMotorClient`" + + DEPENDENCIES = ["motor.motor_asyncio", "pymongo"] + + def __init__( + self, + uri: str, + database_name: str = "limits", + **options: Union[float, str, bool], + ) -> None: + """ + :param uri: uri of the form ``async+mongodb://[user:password]@host:port?...``, + This uri is passed directly to :class:`~motor.motor_asyncio.AsyncIOMotorClient` + :param database_name: The database to use for storing the rate limit + collections. + :param options: all remaining keyword arguments are merged with + :data:`DEFAULT_OPTIONS` and passed to the constructor of + :class:`~motor.motor_asyncio.AsyncIOMotorClient` + :raise ConfigurationError: when the :pypi:`motor` or :pypi:`pymongo` are + not available + """ + + mongo_opts = options.copy() + [mongo_opts.setdefault(k, v) for k, v in self.DEFAULT_OPTIONS.items()] + uri = uri.replace("async+mongodb", "mongodb", 1) + + super().__init__(uri, **options) + + self.dependency = self.dependencies["motor.motor_asyncio"] + self.proxy_dependency = self.dependencies["pymongo"] + + self.storage = self.dependency.module.AsyncIOMotorClient(uri, **mongo_opts) + # TODO: Fix this hack. It was noticed when running a benchmark + # with FastAPI - however - doesn't appear in unit tests or in an isolated + # use. 
Reference: https://jira.mongodb.org/browse/MOTOR-822 + self.storage.get_io_loop = asyncio.get_running_loop + + self.__database_name = database_name + self.__indices_created = False + + @property + def database(self): # type: ignore + return self.storage.get_database(self.__database_name) + + async def create_indices(self) -> None: + if not self.__indices_created: + await asyncio.gather( + self.database.counters.create_index("expireAt", expireAfterSeconds=0), + self.database.windows.create_index("expireAt", expireAfterSeconds=0), + ) + self.__indices_created = True + + async def reset(self) -> Optional[int]: + """ + Delete all rate limit keys in the rate limit collections (counters, windows) + """ + num_keys = sum( + await asyncio.gather( + self.database.counters.count_documents({}), + self.database.windows.count_documents({}), + ) + ) + await asyncio.gather( + self.database.counters.drop(), self.database.windows.drop() + ) + + return num_keys + + async def clear(self, key: str) -> None: + """ + :param key: the key to clear rate limits for + """ + await asyncio.gather( + self.database.counters.find_one_and_delete({"_id": key}), + self.database.windows.find_one_and_delete({"_id": key}), + ) + + async def get_expiry(self, key: str) -> int: + """ + :param key: the key to get the expiry for + """ + counter = await self.database.counters.find_one({"_id": key}) + expiry = counter["expireAt"] if counter else datetime.datetime.utcnow() + + return calendar.timegm(expiry.timetuple()) + + async def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + counter = await self.database.counters.find_one( + {"_id": key, "expireAt": {"$gte": datetime.datetime.utcnow()}}, + projection=["count"], + ) + + return counter and counter["count"] or 0 + + async def incr( + self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1 + ) -> int: + """ + increments the counter for a given rate limit key + + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param elastic_expiry: whether to keep extending the rate limit + window every hit. 
+ :param amount: the number to increment by + """ + await self.create_indices() + + expiration = datetime.datetime.utcnow() + datetime.timedelta(seconds=expiry) + + response = await self.database.counters.find_one_and_update( + {"_id": key}, + [ + { + "$set": { + "count": { + "$cond": { + "if": {"$lt": ["$expireAt", "$$NOW"]}, + "then": amount, + "else": {"$add": ["$count", amount]}, + } + }, + "expireAt": { + "$cond": { + "if": {"$lt": ["$expireAt", "$$NOW"]}, + "then": expiration, + "else": (expiration if elastic_expiry else "$expireAt"), + } + }, + } + }, + ], + upsert=True, + projection=["count"], + return_document=self.proxy_dependency.module.ReturnDocument.AFTER, + ) + + return int(response["count"]) + + async def check(self) -> bool: + """ + Check if storage is healthy by calling + :meth:`motor.motor_asyncio.AsyncIOMotorClient.server_info` + """ + try: + await self.storage.server_info() + + return True + except: # noqa: E722 + return False + + async def get_moving_window( + self, key: str, limit: int, expiry: int + ) -> Tuple[int, int]: + """ + returns the starting point and the number of entries in the moving + window + + :param str key: rate limit key + :param int expiry: expiry of entry + :return: (start of window, number of acquired entries) + """ + timestamp = time.time() + result = await self.database.windows.aggregate( + [ + {"$match": {"_id": key}}, + { + "$project": { + "entries": { + "$filter": { + "input": "$entries", + "as": "entry", + "cond": {"$gte": ["$$entry", timestamp - expiry]}, + } + } + } + }, + {"$unwind": "$entries"}, + { + "$group": { + "_id": "$_id", + "max": {"$max": "$entries"}, + "count": {"$sum": 1}, + } + }, + ] + ).to_list(length=1) + + if result: + return (int(result[0]["max"]), result[0]["count"]) + + return (int(timestamp), 0) + + async def acquire_entry( + self, key: str, limit: int, expiry: int, amount: int = 1 + ) -> bool: + """ + :param key: rate limit key to acquire an entry in + :param limit: amount of entries allowed + :param expiry: expiry of the entry + :param amount: the number of entries to acquire + """ + await self.create_indices() + + timestamp = time.time() + try: + updates: Dict[str, Any] = { # type: ignore + "$push": {"entries": {"$each": [], "$position": 0, "$slice": limit}} + } + + updates["$set"] = { + "expireAt": ( + datetime.datetime.utcnow() + datetime.timedelta(seconds=expiry) + ) + } + updates["$push"]["entries"]["$each"] = [timestamp] * amount + await self.database.windows.update_one( + { + "_id": key, + "entries.%d" + % (limit - amount): {"$not": {"$gte": timestamp - expiry}}, + }, + updates, + upsert=True, + ) + + return True + except self.proxy_dependency.module.errors.DuplicateKeyError: + return False diff --git a/venv/lib/python3.10/site-packages/limits/aio/storage/redis.py b/venv/lib/python3.10/site-packages/limits/aio/storage/redis.py new file mode 100644 index 0000000..9e78710 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/aio/storage/redis.py @@ -0,0 +1,438 @@ +import time +import urllib +from typing import TYPE_CHECKING, cast + +from deprecated.sphinx import versionadded +from packaging.version import Version + +from limits.aio.storage.base import MovingWindowSupport, Storage +from limits.errors import ConfigurationError +from limits.typing import AsyncRedisClient, Dict, Optional, Tuple, Union +from limits.util import get_package_data + +if TYPE_CHECKING: + import coredis + import coredis.commands + + +class RedisInteractor: + RES_DIR = "resources/redis/lua_scripts" + + SCRIPT_MOVING_WINDOW = 
get_package_data(f"{RES_DIR}/moving_window.lua") + SCRIPT_ACQUIRE_MOVING_WINDOW = get_package_data( + f"{RES_DIR}/acquire_moving_window.lua" + ) + SCRIPT_CLEAR_KEYS = get_package_data(f"{RES_DIR}/clear_keys.lua") + SCRIPT_INCR_EXPIRE = get_package_data(f"{RES_DIR}/incr_expire.lua") + + lua_moving_window: "coredis.commands.Script[bytes]" + lua_acquire_window: "coredis.commands.Script[bytes]" + lua_clear_keys: "coredis.commands.Script[bytes]" + lua_incr_expire: "coredis.commands.Script[bytes]" + + async def _incr( + self, + key: str, + expiry: int, + connection: AsyncRedisClient, + elastic_expiry: bool = False, + amount: int = 1, + ) -> int: + """ + increments the counter for a given rate limit key + + :param connection: Redis connection + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param amount: the number to increment by + """ + value = await connection.incrby(key, amount) + + if elastic_expiry or value == amount: + await connection.expire(key, expiry) + + return value + + async def _get(self, key: str, connection: AsyncRedisClient) -> int: + """ + :param connection: Redis connection + :param key: the key to get the counter value for + """ + + return int(await connection.get(key) or 0) + + async def _clear(self, key: str, connection: AsyncRedisClient) -> None: + """ + :param key: the key to clear rate limits for + :param connection: Redis connection + """ + await connection.delete([key]) + + async def get_moving_window( + self, key: str, limit: int, expiry: int + ) -> Tuple[int, int]: + """ + returns the starting point and the number of entries in the moving + window + + :param key: rate limit key + :param expiry: expiry of entry + :return: (start of window, number of acquired entries) + """ + timestamp = int(time.time()) + window = await self.lua_moving_window.execute( + [key], [int(timestamp - expiry), limit] + ) + if window: + return tuple(window) # type: ignore + return timestamp, 0 + + async def _acquire_entry( + self, + key: str, + limit: int, + expiry: int, + connection: AsyncRedisClient, + amount: int = 1, + ) -> bool: + """ + :param key: rate limit key to acquire an entry in + :param limit: amount of entries allowed + :param expiry: expiry of the entry + :param connection: Redis connection + """ + timestamp = time.time() + acquired = await self.lua_acquire_window.execute( + [key], [timestamp, limit, expiry, amount] + ) + + return bool(acquired) + + async def _get_expiry(self, key: str, connection: AsyncRedisClient) -> int: + """ + :param key: the key to get the expiry for + :param connection: Redis connection + """ + + return int(max(await connection.ttl(key), 0) + time.time()) + + async def _check(self, connection: AsyncRedisClient) -> bool: + """ + check if storage is healthy + + :param connection: Redis connection + """ + try: + await connection.ping() + + return True + except: # noqa + return False + + +@versionadded(version="2.1") +class RedisStorage(RedisInteractor, Storage, MovingWindowSupport): + """ + Rate limit storage with redis as backend. 
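+
+    A minimal usage sketch (illustrative only, not part of the upstream
+    source; assumes a redis instance reachable at ``localhost:6379``)::
+
+        import asyncio
+
+        from limits import RateLimitItemPerMinute
+        from limits.aio.storage import RedisStorage
+        from limits.aio.strategies import MovingWindowRateLimiter
+
+        async def main() -> None:
+            storage = RedisStorage("async+redis://localhost:6379")
+            limiter = MovingWindowRateLimiter(storage)
+            item = RateLimitItemPerMinute(10)
+            # True while fewer than 10 hits were recorded in the last minute
+            print(await limiter.hit(item, "user-42"))
+
+        asyncio.run(main())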
+ + Depends on :pypi:`coredis` + """ + + STORAGE_SCHEME = ["async+redis", "async+rediss", "async+redis+unix"] + """ + The storage schemes for redis to be used in an async context + """ + DEPENDENCIES = {"coredis": Version("3.4.0")} + + def __init__( + self, + uri: str, + connection_pool: Optional["coredis.ConnectionPool"] = None, + **options: Union[float, str, bool], + ) -> None: + """ + :param uri: uri of the form: + + - ``async+redis://[:password]@host:port`` + - ``async+redis://[:password]@host:port/db`` + - ``async+rediss://[:password]@host:port`` + - ``async+unix:///path/to/sock`` etc... + + This uri is passed directly to :meth:`coredis.Redis.from_url` with + the initial ``async`` removed, except for the case of ``async+redis+unix`` + where it is replaced with ``unix``. + :param connection_pool: if provided, the redis client is initialized with + the connection pool and any other params passed as :paramref:`options` + :param options: all remaining keyword arguments are passed + directly to the constructor of :class:`coredis.Redis` + :raise ConfigurationError: when the redis library is not available + """ + uri = uri.replace("async+redis", "redis", 1) + uri = uri.replace("redis+unix", "unix") + + super().__init__(uri, **options) + + self.dependency = self.dependencies["coredis"].module + + if connection_pool: + self.storage = self.dependency.Redis( + connection_pool=connection_pool, **options + ) + else: + self.storage = self.dependency.Redis.from_url(uri, **options) + + self.initialize_storage(uri) + + def initialize_storage(self, _uri: str) -> None: + # all these methods are coroutines, so must be called with await + self.lua_moving_window = self.storage.register_script(self.SCRIPT_MOVING_WINDOW) + self.lua_acquire_window = self.storage.register_script( + self.SCRIPT_ACQUIRE_MOVING_WINDOW + ) + self.lua_clear_keys = self.storage.register_script(self.SCRIPT_CLEAR_KEYS) + self.lua_incr_expire = self.storage.register_script( + RedisStorage.SCRIPT_INCR_EXPIRE + ) + + async def incr( + self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1 + ) -> int: + """ + increments the counter for a given rate limit key + + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param amount: the number to increment by + """ + + if elastic_expiry: + return await super()._incr( + key, expiry, self.storage, elastic_expiry, amount + ) + else: + return cast( + int, await self.lua_incr_expire.execute([key], [expiry, amount]) + ) + + async def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + + return await super()._get(key, self.storage) + + async def clear(self, key: str) -> None: + """ + :param key: the key to clear rate limits for + """ + + return await super()._clear(key, self.storage) + + async def acquire_entry( + self, key: str, limit: int, expiry: int, amount: int = 1 + ) -> bool: + """ + :param key: rate limit key to acquire an entry in + :param limit: amount of entries allowed + :param expiry: expiry of the entry + :param amount: the number of entries to acquire + """ + + return await super()._acquire_entry(key, limit, expiry, self.storage, amount) + + async def get_expiry(self, key: str) -> int: + """ + :param key: the key to get the expiry for + """ + + return await super()._get_expiry(key, self.storage) + + async def check(self) -> bool: + """ + Check if storage is healthy by calling :meth:`coredis.Redis.ping` + """ + + return await super()._check(self.storage) + + async def reset(self) -> 
Optional[int]:
+        """
+        This function calls a Lua script to delete keys prefixed with
+        'LIMITER' in blocks of 5000.
+
+        .. warning:: This operation was designed to be fast, but was not tested
+           on a large production-based system. Be careful with its usage as it
+           could be slow on very large data sets.
+        """
+
+        return cast(int, await self.lua_clear_keys.execute(["LIMITER*"]))
+
+
+@versionadded(version="2.1")
+class RedisClusterStorage(RedisStorage):
+    """
+    Rate limit storage with redis cluster as backend
+
+    Depends on :pypi:`coredis`
+    """
+
+    STORAGE_SCHEME = ["async+redis+cluster"]
+    """
+    The storage schemes for redis cluster to be used in an async context
+    """
+
+    DEFAULT_OPTIONS: Dict[str, Union[float, str, bool]] = {
+        "max_connections": 1000,
+    }
+    "Default options passed to :class:`coredis.RedisCluster`"
+
+    def __init__(self, uri: str, **options: Union[float, str, bool]) -> None:
+        """
+        :param uri: uri of the form
+         ``async+redis+cluster://[:password]@host:port,host:port``
+        :param options: all remaining keyword arguments are passed
+         directly to the constructor of :class:`coredis.RedisCluster`
+        :raise ConfigurationError: when the coredis library is not
+         available or if the redis host cannot be pinged.
+        """
+        parsed = urllib.parse.urlparse(uri)
+        cluster_hosts = []
+
+        for loc in parsed.netloc.split(","):
+            host, port = loc.split(":")
+            cluster_hosts.append({"host": host, "port": int(port)})
+
+        super(RedisStorage, self).__init__(uri, **options)
+
+        self.dependency = self.dependencies["coredis"].module
+
+        self.storage: "coredis.RedisCluster[str]" = self.dependency.RedisCluster(
+            startup_nodes=cluster_hosts, **{**self.DEFAULT_OPTIONS, **options}
+        )
+        self.initialize_storage(uri)
+
+    async def reset(self) -> Optional[int]:
+        """
+        Redis Clusters are sharded and deleting across shards
+        can't be done atomically. Because of this, this method loops over all
+        keys that are prefixed with 'LIMITER' and calls delete on them, one at
+        a time.
+
+        .. warning:: This operation was not tested with extremely large data sets.
+ On a large production based system, care should be taken with its + usage as it could be slow on very large data sets + """ + + keys = await self.storage.keys("LIMITER*") + count = 0 + for key in keys: + count += await self.storage.delete([key]) + return count + + +@versionadded(version="2.1") +class RedisSentinelStorage(RedisStorage): + """ + Rate limit storage with redis sentinel as backend + + Depends on :pypi:`coredis` + """ + + STORAGE_SCHEME = ["async+redis+sentinel"] + """The storage scheme for redis accessed via a redis sentinel installation""" + + DEFAULT_OPTIONS: Dict[str, Union[float, str, bool]] = { + "stream_timeout": 0.2, + } + "Default options passed to :class:`~coredis.sentinel.Sentinel`" + + DEPENDENCIES = {"coredis.sentinel": Version("3.4.0")} + + def __init__( + self, + uri: str, + service_name: Optional[str] = None, + use_replicas: bool = True, + sentinel_kwargs: Optional[Dict[str, Union[float, str, bool]]] = None, + **options: Union[float, str, bool], + ): + """ + :param uri: url of the form + ``async+redis+sentinel://host:port,host:port/service_name`` + :param service_name, optional: sentinel service name + (if not provided in `uri`) + :param use_replicas: Whether to use replicas for read only operations + :param sentinel_kwargs, optional: kwargs to pass as + ``sentinel_kwargs`` to :class:`coredis.sentinel.Sentinel` + :param options: all remaining keyword arguments are passed + directly to the constructor of :class:`coredis.sentinel.Sentinel` + :raise ConfigurationError: when the coredis library is not available + or if the redis primary host cannot be pinged. + """ + + parsed = urllib.parse.urlparse(uri) + sentinel_configuration = [] + connection_options = options.copy() + sentinel_options = sentinel_kwargs.copy() if sentinel_kwargs else {} + parsed_auth: Dict[str, Union[float, str, bool]] = {} + + if parsed.username: + parsed_auth["username"] = parsed.username + + if parsed.password: + parsed_auth["password"] = parsed.password + + sep = parsed.netloc.find("@") + 1 + + for loc in parsed.netloc[sep:].split(","): + host, port = loc.split(":") + sentinel_configuration.append((host, int(port))) + self.service_name = ( + parsed.path.replace("/", "") if parsed.path else service_name + ) + + if self.service_name is None: + raise ConfigurationError("'service_name' not provided") + + connection_options.setdefault("stream_timeout", 0.2) + + super(RedisStorage, self).__init__() + + self.dependency = self.dependencies["coredis.sentinel"].module + + self.sentinel = self.dependency.Sentinel( + sentinel_configuration, + sentinel_kwargs={**parsed_auth, **sentinel_options}, + **{**parsed_auth, **connection_options}, + ) + self.storage = self.sentinel.primary_for(self.service_name) + self.storage_replica = self.sentinel.replica_for(self.service_name) + self.use_replicas = use_replicas + self.initialize_storage(uri) + + async def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + + return await super()._get( + key, self.storage_replica if self.use_replicas else self.storage + ) + + async def get_expiry(self, key: str) -> int: + """ + :param key: the key to get the expiry for + """ + + return await super()._get_expiry( + key, self.storage_replica if self.use_replicas else self.storage + ) + + async def check(self) -> bool: + """ + Check if storage is healthy by calling :meth:`coredis.StrictRedis.ping` + on the replica. 
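+
+        An illustrative sketch (assumes a sentinel reachable at
+        ``localhost:26379`` monitoring a service named ``mymaster``)::
+
+            storage = RedisSentinelStorage(
+                "async+redis+sentinel://localhost:26379",
+                service_name="mymaster",
+            )
+            healthy = await storage.check()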
+ """ + + return await super()._check( + self.storage_replica if self.use_replicas else self.storage + ) diff --git a/venv/lib/python3.10/site-packages/limits/aio/strategies.py b/venv/lib/python3.10/site-packages/limits/aio/strategies.py new file mode 100644 index 0000000..79e03f4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/aio/strategies.py @@ -0,0 +1,209 @@ +""" +Asynchronous rate limiting strategies +""" + +import weakref +from abc import ABC, abstractmethod +from typing import Tuple, cast + +from ..limits import RateLimitItem +from ..storage import StorageTypes +from .storage import MovingWindowSupport, Storage + + +class RateLimiter(ABC): + def __init__(self, storage: StorageTypes): + assert isinstance(storage, Storage) + self.storage: Storage = weakref.proxy(storage) + + @abstractmethod + async def hit(self, item: RateLimitItem, *identifiers: str, cost: int = 1) -> bool: + """ + Consume the rate limit + + :param item: the rate limit item + :param identifiers: variable list of strings to uniquely identify the + limit + :param cost: The cost of this hit, default 1 + """ + raise NotImplementedError + + @abstractmethod + async def test(self, item: RateLimitItem, *identifiers: str) -> bool: + """ + Check if the rate limit can be consumed + + :param item: the rate limit item + :param identifiers: variable list of strings to uniquely identify the + limit + """ + raise NotImplementedError + + @abstractmethod + async def get_window_stats( + self, item: RateLimitItem, *identifiers: str + ) -> Tuple[int, int]: + """ + Query the reset time and remaining amount for the limit + + :param item: the rate limit item + :param identifiers: variable list of strings to uniquely identify the + limit + :return: (reset time, remaining)) + """ + raise NotImplementedError + + async def clear(self, item: RateLimitItem, *identifiers: str) -> None: + return await self.storage.clear(item.key_for(*identifiers)) + + +class MovingWindowRateLimiter(RateLimiter): + """ + Reference: :ref:`strategies:moving window` + """ + + def __init__(self, storage: StorageTypes) -> None: + if not ( + hasattr(storage, "acquire_entry") or hasattr(storage, "get_moving_window") + ): + raise NotImplementedError( + "MovingWindowRateLimiting is not implemented for storage " + "of type %s" % storage.__class__ + ) + super().__init__(storage) + + async def hit(self, item: RateLimitItem, *identifiers: str, cost: int = 1) -> bool: + """ + Consume the rate limit + + :param item: the rate limit item + :param identifiers: variable list of strings to uniquely identify the + limit + :param cost: The cost of this hit, default 1 + """ + + return await cast(MovingWindowSupport, self.storage).acquire_entry( + item.key_for(*identifiers), item.amount, item.get_expiry(), amount=cost + ) + + async def test(self, item: RateLimitItem, *identifiers: str) -> bool: + """ + Check if the rate limit can be consumed + + :param item: the rate limit item + :param identifiers: variable list of strings to uniquely identify the + limit + """ + res = await cast(MovingWindowSupport, self.storage).get_moving_window( + item.key_for(*identifiers), + item.amount, + item.get_expiry(), + ) + amount = res[1] + + return amount < item.amount + + async def get_window_stats( + self, item: RateLimitItem, *identifiers: str + ) -> Tuple[int, int]: + """ + returns the number of requests remaining within this limit. 
+ + :param item: the rate limit item + :param identifiers: variable list of strings to uniquely identify the + limit + :return: (reset time, remaining) + """ + window_start, window_items = await cast( + MovingWindowSupport, self.storage + ).get_moving_window(item.key_for(*identifiers), item.amount, item.get_expiry()) + reset = window_start + item.get_expiry() + + return reset, item.amount - window_items + + +class FixedWindowRateLimiter(RateLimiter): + """ + Reference: :ref:`strategies:fixed window` + """ + + async def hit(self, item: RateLimitItem, *identifiers: str, cost: int = 1) -> bool: + """ + Consume the rate limit + + :param item: the rate limit item + :param identifiers: variable list of strings to uniquely identify the + limit + :param cost: The cost of this hit, default 1 + """ + + return ( + await self.storage.incr( + item.key_for(*identifiers), + item.get_expiry(), + elastic_expiry=False, + amount=cost, + ) + <= item.amount + ) + + async def test(self, item: RateLimitItem, *identifiers: str) -> bool: + """ + Check if the rate limit can be consumed + + :param item: the rate limit item + :param identifiers: variable list of strings to uniquely identify the + limit + """ + + return await self.storage.get(item.key_for(*identifiers)) < item.amount + + async def get_window_stats( + self, item: RateLimitItem, *identifiers: str + ) -> Tuple[int, int]: + """ + Query the reset time and remaining amount for the limit + + :param item: the rate limit item + :param identifiers: variable list of strings to uniquely identify the + limit + :return: reset time, remaining + """ + remaining = max( + 0, + item.amount - await self.storage.get(item.key_for(*identifiers)), + ) + reset = await self.storage.get_expiry(item.key_for(*identifiers)) + + return reset, remaining + + +class FixedWindowElasticExpiryRateLimiter(FixedWindowRateLimiter): + """ + Reference: :ref:`strategies:fixed window with elastic expiry` + """ + + async def hit(self, item: RateLimitItem, *identifiers: str, cost: int = 1) -> bool: + """ + Consume the rate limit + + :param item: a :class:`limits.limits.RateLimitItem` instance + :param identifiers: variable list of strings to uniquely identify the + limit + :param cost: The cost of this hit, default 1 + """ + amount = await self.storage.incr( + item.key_for(*identifiers), + item.get_expiry(), + elastic_expiry=True, + amount=cost, + ) + + return amount <= item.amount + + +STRATEGIES = { + "fixed-window": FixedWindowRateLimiter, + "fixed-window-elastic-expiry": FixedWindowElasticExpiryRateLimiter, + "moving-window": MovingWindowRateLimiter, +} diff --git a/venv/lib/python3.10/site-packages/limits/errors.py b/venv/lib/python3.10/site-packages/limits/errors.py new file mode 100644 index 0000000..b2115c4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/errors.py @@ -0,0 +1,9 @@ +""" +errors and exceptions +""" + + +class ConfigurationError(Exception): + """ + exception raised when a configuration problem is encountered + """ diff --git a/venv/lib/python3.10/site-packages/limits/limits.py b/venv/lib/python3.10/site-packages/limits/limits.py new file mode 100644 index 0000000..3346bd3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/limits.py @@ -0,0 +1,195 @@ +""" + +""" +from __future__ import annotations + +from functools import total_ordering +from typing import Dict, NamedTuple, Optional, Tuple, Type, Union, cast + +from limits.typing import ClassVar, List + + +def safe_string(value: Union[bytes, str, int]) -> str: + """ + converts a byte/str or int to a str 
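+
+    For example (illustrative)::
+
+        safe_string(b"limiter")  # -> "limiter"
+        safe_string(10)          # -> "10"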
+ """ + + if isinstance(value, bytes): + return value.decode() + + return str(value) + + +class Granularity(NamedTuple): + seconds: int + name: str + + +TIME_TYPES = dict( + day=Granularity(60 * 60 * 24, "day"), + month=Granularity(60 * 60 * 24 * 30, "month"), + year=Granularity(60 * 60 * 24 * 30 * 12, "year"), + hour=Granularity(60 * 60, "hour"), + minute=Granularity(60, "minute"), + second=Granularity(1, "second"), +) + +GRANULARITIES: Dict[str, Type[RateLimitItem]] = {} + + +class RateLimitItemMeta(type): + def __new__( + cls, + name: str, + parents: Tuple[type, ...], + dct: Dict[str, Union[Granularity, List[str]]], + ) -> RateLimitItemMeta: + if "__slots__" not in dct: + dct["__slots__"] = [] + granularity = super().__new__(cls, name, parents, dct) + + if "GRANULARITY" in dct: + GRANULARITIES[dct["GRANULARITY"][1]] = cast( + Type[RateLimitItem], granularity + ) + + return granularity + + +# pylint: disable=no-member +@total_ordering +class RateLimitItem(metaclass=RateLimitItemMeta): + """ + defines a Rate limited resource which contains the characteristic + namespace, amount and granularity multiples of the rate limiting window. + + :param amount: the rate limit amount + :param multiples: multiple of the 'per' :attr:`GRANULARITY` + (e.g. 'n' per 'm' seconds) + :param namespace: category for the specific rate limit + """ + + __slots__ = ["namespace", "amount", "multiples"] + + GRANULARITY: ClassVar[Granularity] + """ + A tuple describing the granularity of this limit as + (number of seconds, name) + """ + + def __init__( + self, amount: int, multiples: Optional[int] = 1, namespace: str = "LIMITER" + ): + self.namespace = namespace + self.amount = int(amount) + self.multiples = int(multiples or 1) + + @classmethod + def check_granularity_string(cls, granularity_string: str) -> bool: + """ + Checks if this instance matches a *granularity_string* + of type ``n per hour``, ``n per minute`` etc, + by comparing with :attr:`GRANULARITY` + + """ + + return granularity_string.lower() in cls.GRANULARITY.name + + def get_expiry(self) -> int: + """ + :return: the duration the limit is enforced for in seconds. + """ + + return self.GRANULARITY.seconds * self.multiples + + def key_for(self, *identifiers: str) -> str: + """ + Constructs a key for the current limit and any additional + identifiers provided. + + :param identifiers: a list of strings to append to the key + :return: a string key identifying this resource with + each identifier appended with a '/' delimiter. + """ + remainder = "/".join( + [safe_string(k) for k in identifiers] + + [ + safe_string(self.amount), + safe_string(self.multiples), + self.GRANULARITY.name, + ] + ) + + return f"{self.namespace}/{remainder}" + + def __eq__(self, other: object) -> bool: + if isinstance(other, RateLimitItem): + return ( + self.amount == other.amount + and self.GRANULARITY == other.GRANULARITY + and self.multiples == other.multiples + ) + return False + + def __repr__(self) -> str: + return f"{self.amount} per {self.multiples} {self.GRANULARITY.name}" + + def __lt__(self, other: RateLimitItem) -> bool: + return self.GRANULARITY.seconds < other.GRANULARITY.seconds + + def __hash__(self) -> int: + return hash((self.namespace, self.amount, self.multiples, self.GRANULARITY)) + + +class RateLimitItemPerYear(RateLimitItem): + """ + per year rate limited resource. + """ + + GRANULARITY = TIME_TYPES["year"] + """A year""" + + +class RateLimitItemPerMonth(RateLimitItem): + """ + per month rate limited resource. 
+ """ + + GRANULARITY = TIME_TYPES["month"] + """A month""" + + +class RateLimitItemPerDay(RateLimitItem): + """ + per day rate limited resource. + """ + + GRANULARITY = TIME_TYPES["day"] + """A day""" + + +class RateLimitItemPerHour(RateLimitItem): + """ + per hour rate limited resource. + """ + + GRANULARITY = TIME_TYPES["hour"] + """An hour""" + + +class RateLimitItemPerMinute(RateLimitItem): + """ + per minute rate limited resource. + """ + + GRANULARITY = TIME_TYPES["minute"] + """A minute""" + + +class RateLimitItemPerSecond(RateLimitItem): + """ + per second rate limited resource. + """ + + GRANULARITY = TIME_TYPES["second"] + """A second""" diff --git a/venv/lib/python3.10/site-packages/limits/py.typed b/venv/lib/python3.10/site-packages/limits/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/acquire_moving_window.lua b/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/acquire_moving_window.lua new file mode 100644 index 0000000..9d2d7ad --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/acquire_moving_window.lua @@ -0,0 +1,18 @@ +local amount = tonumber(ARGV[4]) +local entry = redis.call('lindex', KEYS[1], tonumber(ARGV[2]) - amount) +local timestamp = tonumber(ARGV[1]) +local expiry = tonumber(ARGV[3]) + +if entry and tonumber(entry) >= timestamp - expiry then + return false +end +local limit = tonumber(ARGV[2]) +local entries= {} +for i=1, amount do + entries[i] = timestamp +end +redis.call('lpush', KEYS[1], unpack(entries)) +redis.call('ltrim', KEYS[1], 0, limit - 1) +redis.call('expire', KEYS[1], expiry) + +return true diff --git a/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/clear_keys.lua b/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/clear_keys.lua new file mode 100644 index 0000000..7d6ff7e --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/clear_keys.lua @@ -0,0 +1,10 @@ +local keys = redis.call('keys', KEYS[1]) +local res = 0 + +for i=1,#keys,5000 do + res = res + redis.call( + 'del', unpack(keys, i, math.min(i+4999, #keys)) + ) +end + +return res diff --git a/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/gcra_consume.lua b/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/gcra_consume.lua new file mode 100644 index 0000000..9dc679d --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/gcra_consume.lua @@ -0,0 +1,50 @@ +local rate_limit_key = KEYS[1] +local burst = ARGV[1] +local rate = ARGV[2] +local period = ARGV[3] +local cost = ARGV[4] + +local emission_interval = period / rate +local increment = emission_interval * cost +local burst_offset = emission_interval * burst + +local tat = redis.call("GET", rate_limit_key) + +local time = redis.call("TIME") +local now = tonumber(time[1]) + tonumber(time[2])/1000000.0 + +if not tat then + tat = now +else + tat = tonumber(tat) +end + +tat = math.max(tat, now) + +local new_tat = tat + increment +local allow_at = new_tat - burst_offset +local diff = now - allow_at + +local consumed = 0 +local retry_in = 0 +local reset_in + +local remaining = math.floor(diff / emission_interval) -- poor man's round +if remaining < 0 then + consumed = 0 + remaining = math.floor((now - (tat - burst_offset)) / emission_interval) + reset_in = math.ceil(tat - now) + retry_in = math.ceil(diff * -1) +elseif remaining == 0 and increment <= 0 then + 
consumed = 1
+    remaining = 0
+    reset_in = math.ceil(tat - now)
+else
+    consumed = 1
+    reset_in = math.ceil(new_tat - now)
+    if increment > 0 then
+        redis.call("SET", rate_limit_key, new_tat, "PX", reset_in)
+    end
+end
+
+return {consumed, remaining, retry_in, reset_in}
\ No newline at end of file
diff --git a/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/incr_expire.lua b/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/incr_expire.lua
new file mode 100644
index 0000000..062dffb
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/incr_expire.lua
@@ -0,0 +1,9 @@
+local current
+local amount = tonumber(ARGV[2])
+current = redis.call("incrby", KEYS[1], amount)
+
+if tonumber(current) == amount then
+    redis.call("expire", KEYS[1], ARGV[1])
+end
+
+return current
diff --git a/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/moving_window.lua b/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/moving_window.lua
new file mode 100644
index 0000000..94b5a11
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/limits/resources/redis/lua_scripts/moving_window.lua
@@ -0,0 +1,18 @@
+local items = redis.call('lrange', KEYS[1], 0, tonumber(ARGV[2]))
+local expiry = tonumber(ARGV[1])
+local a = 0
+local oldest = nil
+
+for idx=1,#items do
+    if tonumber(items[idx]) >= expiry then
+        a = a + 1
+
+        if oldest == nil then
+            oldest = tonumber(items[idx])
+        end
+    else
+        break
+    end
+end
+
+return {oldest, a}
diff --git a/venv/lib/python3.10/site-packages/limits/storage/__init__.py b/venv/lib/python3.10/site-packages/limits/storage/__init__.py
new file mode 100644
index 0000000..68726b2
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/limits/storage/__init__.py
@@ -0,0 +1,77 @@
+"""
+Implementations of storage backends to be used with
+:class:`limits.strategies.RateLimiter` strategies
+"""
+
+import urllib
+from typing import Union, cast
+
+import limits
+
+from ..errors import ConfigurationError
+from .base import MovingWindowSupport, Storage
+from .gae_memcached import GAEMemcachedStorage
+from .memcached import MemcachedStorage
+from .memory import MemoryStorage
+from .mongodb import MongoDBStorage
+from .redis import RedisStorage
+from .redis_cluster import RedisClusterStorage
+from .redis_sentinel import RedisSentinelStorage
+from .registry import SCHEMES
+
+StorageTypes = Union[Storage, "limits.aio.storage.Storage"]
+
+
+def storage_from_string(
+    storage_string: str, **options: Union[float, str, bool]
+) -> StorageTypes:
+    """
+    Factory function to get an instance of the storage class based
+    on the uri of the storage. In most cases using it should be sufficient
+    instead of directly instantiating the storage classes. For example::
+
+        from limits.storage import storage_from_string
+
+        memory = storage_from_string("memory://")
+        memcached = storage_from_string("memcached://localhost:11211")
+        redis = storage_from_string("redis://localhost:6379")
+
+    The same function can be used to construct the :ref:`storage:async storage`
+    variants, for example::
+
+        from limits.storage import storage_from_string
+
+        memory = storage_from_string("async+memory://")
+        memcached = storage_from_string("async+memcached://localhost:11211")
+        redis = storage_from_string("async+redis://localhost:6379")
+
+    :param storage_string: a string of the form ``scheme://host:port``.
+ More details about supported storage schemes can be found at + :ref:`storage:storage scheme` + :param options: all remaining keyword arguments are passed to the + constructor matched by :paramref:`storage_string`. + :raises ConfigurationError: when the :attr:`storage_string` cannot be + mapped to a registered :class:`limits.storage.Storage` + or :class:`limits.aio.storage.Storage` instance. + + + """ + scheme = urllib.parse.urlparse(storage_string).scheme + + if scheme not in SCHEMES: + raise ConfigurationError("unknown storage scheme : %s" % storage_string) + return cast(StorageTypes, SCHEMES[scheme](storage_string, **options)) + + +__all__ = [ + "storage_from_string", + "Storage", + "MovingWindowSupport", + "MemoryStorage", + "MongoDBStorage", + "RedisStorage", + "RedisClusterStorage", + "RedisSentinelStorage", + "MemcachedStorage", + "GAEMemcachedStorage", +] diff --git a/venv/lib/python3.10/site-packages/limits/storage/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..fc0b5ed Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/storage/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000..f08d4e8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/base.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/storage/__pycache__/gae_memcached.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/gae_memcached.cpython-310.pyc new file mode 100644 index 0000000..1e3442f Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/gae_memcached.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/storage/__pycache__/memcached.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/memcached.cpython-310.pyc new file mode 100644 index 0000000..60c99f8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/memcached.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/storage/__pycache__/memory.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/memory.cpython-310.pyc new file mode 100644 index 0000000..122e714 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/memory.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/storage/__pycache__/mongodb.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/mongodb.cpython-310.pyc new file mode 100644 index 0000000..5890637 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/mongodb.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/storage/__pycache__/redis.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/redis.cpython-310.pyc new file mode 100644 index 0000000..22eb092 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/redis.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/storage/__pycache__/redis_cluster.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/redis_cluster.cpython-310.pyc new file mode 100644 
index 0000000..d08fb3f Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/redis_cluster.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/storage/__pycache__/redis_sentinel.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/redis_sentinel.cpython-310.pyc new file mode 100644 index 0000000..b3099ea Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/redis_sentinel.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/storage/__pycache__/registry.cpython-310.pyc b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/registry.cpython-310.pyc new file mode 100644 index 0000000..6414c73 Binary files /dev/null and b/venv/lib/python3.10/site-packages/limits/storage/__pycache__/registry.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/limits/storage/base.py b/venv/lib/python3.10/site-packages/limits/storage/base.py new file mode 100644 index 0000000..345e78b --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/storage/base.py @@ -0,0 +1,98 @@ +import threading +from abc import ABC, abstractmethod + +from limits.storage.registry import StorageRegistry +from limits.typing import List, Optional, Tuple, Union +from limits.util import LazyDependency + + +class Storage(LazyDependency, metaclass=StorageRegistry): + """ + Base class to extend when implementing a storage backend. + """ + + STORAGE_SCHEME: Optional[List[str]] + """The storage schemes to register against this implementation""" + + def __init__(self, uri: Optional[str] = None, **options: Union[float, str, bool]): + self.lock = threading.RLock() + super().__init__() + + @abstractmethod + def incr( + self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1 + ) -> int: + """ + increments the counter for a given rate limit key + + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param elastic_expiry: whether to keep extending the rate limit + window every hit. 
+ :param amount: the number to increment by + """ + raise NotImplementedError + + @abstractmethod + def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + raise NotImplementedError + + @abstractmethod + def get_expiry(self, key: str) -> int: + """ + :param key: the key to get the expiry for + """ + raise NotImplementedError + + @abstractmethod + def check(self) -> bool: + """ + check if storage is healthy + """ + raise NotImplementedError + + @abstractmethod + def reset(self) -> Optional[int]: + """ + reset storage to clear limits + """ + raise NotImplementedError + + @abstractmethod + def clear(self, key: str) -> None: + """ + resets the rate limit key + + :param key: the key to clear rate limits for + """ + raise NotImplementedError + + +class MovingWindowSupport(ABC): + """ + Abstract base for storages that intend to support + the moving window strategy + """ + + def acquire_entry(self, key: str, limit: int, expiry: int, amount: int = 1) -> bool: + """ + :param key: rate limit key to acquire an entry in + :param limit: amount of entries allowed + :param expiry: expiry of the entry + :param amount: the number of entries to acquire + """ + raise NotImplementedError + + def get_moving_window(self, key: str, limit: int, expiry: int) -> Tuple[int, int]: + """ + returns the starting point and the number of entries in the moving + window + + :param key: rate limit key + :param expiry: expiry of entry + :return: (start of window, number of acquired entries) + """ + raise NotImplementedError diff --git a/venv/lib/python3.10/site-packages/limits/storage/gae_memcached.py b/venv/lib/python3.10/site-packages/limits/storage/gae_memcached.py new file mode 100644 index 0000000..2019006 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/storage/gae_memcached.py @@ -0,0 +1,75 @@ +import time + +from deprecated.sphinx import deprecated + +from limits.storage.memcached import MemcachedStorage + + +@deprecated( + reason="""The implementation has not been tested at all +in version 2.0 (and for many years) and will be removed in limits 2.5""", + version="2.0", + action="once", +) +class GAEMemcachedStorage(MemcachedStorage): # noqa + """ + rate limit storage with GAE memcache as backend + """ + + MAX_CAS_RETRIES = 10 + STORAGE_SCHEME = ["gaememcached"] + + def __init__(self, uri: str, **options): + options["library"] = "google.appengine.api.memcache" + super().__init__(uri, **options) + + def incr( + self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1 + ): + """ + increments the counter for a given rate limit key + + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param elastic_expiry: whether to keep extending the rate limit + window every hit. 
+ :param amount: the number to increment by + """ + + if not self.call_memcached_func(self.storage.add, key, amount, expiry): + if elastic_expiry: + # CAS id is set as state on the client object in GAE memcache + value = self.storage.gets(key) + retry = 0 + + while ( + not self.call_memcached_func( + self.storage.cas, key, int(value or 0) + amount, expiry + ) + and retry < self.MAX_CAS_RETRIES + ): + value = self.storage.gets(key) + retry += 1 + self.call_memcached_func( + self.storage.set, key + "/expires", expiry + time.time(), expiry + ) + + return int(value or 0) + amount + else: + return self.storage.incr(key, amount) + self.call_memcached_func( + self.storage.set, key + "/expires", expiry + time.time(), expiry + ) + + return 1 + + def check(self) -> bool: + """ + check if storage is healthy + """ + try: + self.call_memcached_func(self.storage.get_stats) + + return True + except: # noqa + return False diff --git a/venv/lib/python3.10/site-packages/limits/storage/memcached.py b/venv/lib/python3.10/site-packages/limits/storage/memcached.py new file mode 100644 index 0000000..6d1a660 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/storage/memcached.py @@ -0,0 +1,188 @@ +import inspect +import threading +import time +import urllib.parse +from types import ModuleType +from typing import cast + +from limits.errors import ConfigurationError +from limits.storage.base import Storage +from limits.typing import Callable, List, MemcachedClientP, Optional, P, R, Tuple, Union +from limits.util import get_dependency + + +class MemcachedStorage(Storage): + """ + Rate limit storage with memcached as backend. + + Depends on :pypi:`pymemcache`. + """ + + STORAGE_SCHEME = ["memcached"] + """The storage scheme for memcached""" + + def __init__( + self, + uri: str, + **options: Union[str, Callable[[], MemcachedClientP]], + ) -> None: + """ + :param uri: memcached location of the form + ``memcached://host:port,host:port``, + ``memcached:///var/tmp/path/to/sock`` + :param options: all remaining keyword arguments are passed + directly to the constructor of :class:`pymemcache.client.base.PooledClient` + or :class:`pymemcache.client.hash.HashClient` (if there are more than + one hosts specified) + :raise ConfigurationError: when :pypi:`pymemcache` is not available + """ + parsed = urllib.parse.urlparse(uri) + self.hosts = [] + + for loc in parsed.netloc.strip().split(","): + if not loc: + continue + host, port = loc.split(":") + self.hosts.append((host, int(port))) + else: + # filesystem path to UDS + + if parsed.path and not parsed.netloc and not parsed.port: + self.hosts = [parsed.path] # type: ignore + + self.library = str(options.pop("library", "pymemcache.client")) + self.cluster_library = str( + options.pop("cluster_library", "pymemcache.client.hash") + ) + self.client_getter = cast( + Callable[[ModuleType, List[Tuple[str, int]]], MemcachedClientP], + options.pop("client_getter", self.get_client), + ) + self.options = options + + if not get_dependency(self.library): + raise ConfigurationError( + "memcached prerequisite not available." + " please install %s" % self.library + ) # pragma: no cover + self.local_storage = threading.local() + self.local_storage.storage = None + + def get_client( + self, module: ModuleType, hosts: List[Tuple[str, int]], **kwargs: str + ) -> MemcachedClientP: + """ + returns a memcached client. 
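+        A :class:`pymemcache.client.hash.HashClient` is returned when more
+        than one host is configured, otherwise a
+        :class:`pymemcache.client.base.PooledClient`, as the body below shows.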
+ + :param module: the memcached module + :param hosts: list of memcached hosts + """ + return cast( + MemcachedClientP, + module.HashClient(hosts, **kwargs) + if len(hosts) > 1 + else module.PooledClient(*hosts, **kwargs), + ) + + def call_memcached_func( + self, func: Callable[P, R], *args: P.args, **kwargs: P.kwargs + ) -> R: + if "noreply" in kwargs: + argspec = inspect.getfullargspec(func) + if not ("noreply" in argspec.args or argspec.varkw): + kwargs.pop("noreply") + + return func(*args, **kwargs) + + @property + def storage(self) -> MemcachedClientP: + """ + lazily creates a memcached client instance using a thread local + """ + + if not (hasattr(self.local_storage, "storage") and self.local_storage.storage): + dependency = get_dependency( + self.cluster_library if len(self.hosts) > 1 else self.library + )[0] + if not dependency: + raise ConfigurationError(f"Unable to import {self.cluster_library}") + self.local_storage.storage = self.client_getter( + dependency, self.hosts, **self.options + ) + + return cast(MemcachedClientP, self.local_storage.storage) + + def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + + return int(self.storage.get(key) or 0) + + def clear(self, key: str) -> None: + """ + :param key: the key to clear rate limits for + """ + self.storage.delete(key) + + def incr( + self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1 + ) -> int: + """ + increments the counter for a given rate limit key + + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param elastic_expiry: whether to keep extending the rate limit + window every hit. + :param amount: the number to increment by + """ + + if not self.call_memcached_func( + self.storage.add, key, amount, expiry, noreply=False + ): + value = self.storage.incr(key, amount) or amount + + if elastic_expiry: + self.call_memcached_func(self.storage.touch, key, expiry) + self.call_memcached_func( + self.storage.set, + key + "/expires", + expiry + time.time(), + expire=expiry, + noreply=False, + ) + + return value + else: + self.call_memcached_func( + self.storage.set, + key + "/expires", + expiry + time.time(), + expire=expiry, + noreply=False, + ) + + return amount + + def get_expiry(self, key: str) -> int: + """ + :param key: the key to get the expiry for + """ + + return int(float(self.storage.get(key + "/expires") or time.time())) + + def check(self) -> bool: + """ + Check if storage is healthy by calling the ``get`` command + on the key ``limiter-check`` + """ + try: + self.call_memcached_func(self.storage.get, "limiter-check") + + return True + except: # noqa + return False + + def reset(self) -> Optional[int]: + raise NotImplementedError diff --git a/venv/lib/python3.10/site-packages/limits/storage/memory.py b/venv/lib/python3.10/site-packages/limits/storage/memory.py new file mode 100644 index 0000000..b53bbbc --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/storage/memory.py @@ -0,0 +1,164 @@ +import threading +import time +from collections import Counter + +import limits.typing +from limits.storage.base import MovingWindowSupport, Storage +from limits.typing import Dict, List, Optional, Tuple + + +class LockableEntry(threading._RLock): + def __init__(self, expiry: float) -> None: + self.atime = time.time() + self.expiry = self.atime + expiry + super().__init__() + + +class MemoryStorage(Storage, MovingWindowSupport): + """ + rate limit storage using :class:`collections.Counter` + as an in memory 
storage for fixed and elastic window strategies, + and a simple list to implement moving window strategy. + + """ + + STORAGE_SCHEME = ["memory"] + + def __init__(self, uri: Optional[str] = None, **_: str): + self.storage: limits.typing.Counter[str] = Counter() + self.expirations: Dict[str, float] = {} + self.events: Dict[str, List[LockableEntry]] = {} + self.timer = threading.Timer(0.01, self.__expire_events) + self.timer.start() + super().__init__(uri, **_) + + def __expire_events(self) -> None: + for key in list(self.events.keys()): + for event in list(self.events[key]): + with event: + if event.expiry <= time.time() and event in self.events[key]: + self.events[key].remove(event) + + for key in list(self.expirations.keys()): + if self.expirations[key] <= time.time(): + self.storage.pop(key, None) + self.expirations.pop(key, None) + + def __schedule_expiry(self) -> None: + if not self.timer.is_alive(): + self.timer = threading.Timer(0.01, self.__expire_events) + self.timer.start() + + def incr( + self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1 + ) -> int: + """ + increments the counter for a given rate limit key + + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param elastic_expiry: whether to keep extending the rate limit + window every hit. + :param amount: the number to increment by + """ + self.get(key) + self.__schedule_expiry() + self.storage[key] += amount + + if elastic_expiry or self.storage[key] == amount: + self.expirations[key] = time.time() + expiry + + return self.storage.get(key, 0) + + def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + + if self.expirations.get(key, 0) <= time.time(): + self.storage.pop(key, None) + self.expirations.pop(key, None) + + return self.storage.get(key, 0) + + def clear(self, key: str) -> None: + """ + :param key: the key to clear rate limits for + """ + self.storage.pop(key, None) + self.expirations.pop(key, None) + self.events.pop(key, None) + + def acquire_entry(self, key: str, limit: int, expiry: int, amount: int = 1) -> bool: + """ + :param key: rate limit key to acquire an entry in + :param limit: amount of entries allowed + :param expiry: expiry of the entry + :param amount: the number of entries to acquire + """ + self.events.setdefault(key, []) + self.__schedule_expiry() + timestamp = time.time() + try: + entry = self.events[key][limit - amount] + except IndexError: + entry = None + + if entry and entry.atime >= timestamp - expiry: + return False + else: + self.events[key][:0] = [LockableEntry(expiry) for _ in range(amount)] + return True + + def get_expiry(self, key: str) -> int: + """ + :param key: the key to get the expiry for + """ + + return int(self.expirations.get(key, time.time())) + + def get_num_acquired(self, key: str, expiry: int) -> int: + """ + returns the number of entries already acquired + + :param key: rate limit key to acquire an entry in + :param expiry: expiry of the entry + """ + timestamp = time.time() + + return ( + len([k for k in self.events[key] if k.atime >= timestamp - expiry]) + if self.events.get(key) + else 0 + ) + + def get_moving_window(self, key: str, limit: int, expiry: int) -> Tuple[int, int]: + """ + returns the starting point and the number of entries in the moving + window + + :param key: rate limit key + :param expiry: expiry of entry + :return: (start of window, number of acquired entries) + """ + timestamp = time.time() + acquired = self.get_num_acquired(key, expiry) + + 
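+        # scan the recorded events and report the timestamp of the first
+        # one still inside the window as the start of the window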
for item in self.events.get(key, []): + if item.atime >= timestamp - expiry: + return int(item.atime), acquired + + return int(timestamp), acquired + + def check(self) -> bool: + """ + check if storage is healthy + """ + + return True + + def reset(self) -> Optional[int]: + self.storage.clear() + self.expirations.clear() + self.events.clear() + return None diff --git a/venv/lib/python3.10/site-packages/limits/storage/mongodb.py b/venv/lib/python3.10/site-packages/limits/storage/mongodb.py new file mode 100644 index 0000000..cbef9e2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/storage/mongodb.py @@ -0,0 +1,230 @@ +from __future__ import annotations + +import calendar +import datetime +import time +from typing import TYPE_CHECKING, Any + +from deprecated.sphinx import versionadded + +from limits.typing import Dict, Optional, Tuple, Union + +from .base import MovingWindowSupport, Storage + +if TYPE_CHECKING: + import pymongo + + +@versionadded(version="2.1") +class MongoDBStorage(Storage, MovingWindowSupport): + """ + Rate limit storage with MongoDB as backend. + + Depends on :pypi:`pymongo`. + """ + + STORAGE_SCHEME = ["mongodb", "mongodb+srv"] + DEFAULT_OPTIONS: Dict[str, Union[int, str, bool]] = { + "serverSelectionTimeoutMS": 1000, + "socketTimeoutMS": 1000, + "connectTimeoutMS": 1000, + } + "Default options passed to :class:`~pymongo.mongo_client.MongoClient`" + + DEPENDENCIES = ["pymongo"] + + def __init__( + self, uri: str, database_name: str = "limits", **options: Union[int, str, bool] + ) -> None: + """ + :param uri: uri of the form ``mongodb://[user:password]@host:port?...``, + This uri is passed directly to :class:`~pymongo.mongo_client.MongoClient` + :param database_name: The database to use for storing the rate limit + collections. 
+ :param options: all remaining keyword arguments are merged with + :data:`DEFAULT_OPTIONS` and passed to the constructor of + :class:`~pymongo.mongo_client.MongoClient` + :raise ConfigurationError: when the :pypi:`pymongo` library is not available + """ + + super().__init__(uri, **options) + + self.lib = self.dependencies["pymongo"].module + + mongo_opts = options.copy() + [mongo_opts.setdefault(k, v) for k, v in self.DEFAULT_OPTIONS.items()] + + self.storage: "pymongo.MongoClient" = self.lib.MongoClient(uri, **mongo_opts) + self.counters = self.storage.get_database(database_name).counters + self.windows = self.storage.get_database(database_name).windows + self.__initialize_database() + + def __initialize_database(self) -> None: + self.counters.create_index("expireAt", expireAfterSeconds=0) + self.windows.create_index("expireAt", expireAfterSeconds=0) + + def reset(self) -> Optional[int]: + """ + Delete all rate limit keys in the rate limit collections (counters, windows) + """ + num_keys = self.counters.count_documents({}) + self.windows.count_documents({}) + self.counters.drop() + self.windows.drop() + + return int(num_keys) + + def clear(self, key: str) -> None: + """ + :param key: the key to clear rate limits for + """ + self.counters.find_one_and_delete({"_id": key}) + self.windows.find_one_and_delete({"_id": key}) + + def get_expiry(self, key: str) -> int: + """ + :param key: the key to get the expiry for + """ + counter = self.counters.find_one({"_id": key}) + expiry = counter["expireAt"] if counter else datetime.datetime.utcnow() + + return calendar.timegm(expiry.timetuple()) + + def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + counter = self.counters.find_one( + {"_id": key, "expireAt": {"$gte": datetime.datetime.utcnow()}}, + projection=["count"], + ) + + return counter and counter["count"] or 0 + + def incr( + self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1 + ) -> int: + """ + increments the counter for a given rate limit key + + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param amount: the number to increment by + """ + expiration = datetime.datetime.utcnow() + datetime.timedelta(seconds=expiry) + + return int( + self.counters.find_one_and_update( + {"_id": key}, + [ + { + "$set": { + "count": { + "$cond": { + "if": {"$lt": ["$expireAt", "$$NOW"]}, + "then": amount, + "else": {"$add": ["$count", amount]}, + } + }, + "expireAt": { + "$cond": { + "if": {"$lt": ["$expireAt", "$$NOW"]}, + "then": expiration, + "else": ( + expiration if elastic_expiry else "$expireAt" + ), + } + }, + } + }, + ], + upsert=True, + projection=["count"], + return_document=self.lib.ReturnDocument.AFTER, + )["count"] + ) + + def check(self) -> bool: + """ + Check if storage is healthy by calling :meth:`pymongo.mongo_client.MongoClient.server_info` + """ + try: + self.storage.server_info() + + return True + except: # noqa: E722 + return False + + def get_moving_window(self, key: str, limit: int, expiry: int) -> Tuple[int, int]: + """ + returns the starting point and the number of entries in the moving + window + + :param key: rate limit key + :param expiry: expiry of entry + :return: (start of window, number of acquired entries) + """ + timestamp = time.time() + result = list( + self.windows.aggregate( + [ + {"$match": {"_id": key}}, + { + "$project": { + "entries": { + "$filter": { + "input": "$entries", + "as": "entry", + "cond": {"$gte": ["$$entry", timestamp - expiry]}, + } + 
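+                            # The $filter above keeps only entries newer than
+                            # (now - expiry); $unwind/$group then derive the
+                            # window start ($max) and the entry count ($sum).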
} + } + }, + {"$unwind": "$entries"}, + { + "$group": { + "_id": "$_id", + "max": {"$max": "$entries"}, + "count": {"$sum": 1}, + } + }, + ] + ) + ) + + if result: + return int(result[0]["max"]), result[0]["count"] + + return int(timestamp), 0 + + def acquire_entry(self, key: str, limit: int, expiry: int, amount: int = 1) -> bool: + """ + :param key: rate limit key to acquire an entry in + :param limit: amount of entries allowed + :param expiry: expiry of the entry + :param amount: the number of entries to acquire + """ + timestamp = time.time() + try: + updates: Dict[str, Any] = { # type: ignore + "$push": {"entries": {"$each": [], "$position": 0, "$slice": limit}} + } + + updates["$set"] = { + "expireAt": ( + datetime.datetime.utcnow() + datetime.timedelta(seconds=expiry) + ) + } + updates["$push"]["entries"]["$each"] = [timestamp] * amount + self.windows.update_one( + { + "_id": key, + "entries.%d" + % (limit - amount): {"$not": {"$gte": timestamp - expiry}}, + }, + updates, + upsert=True, + ) + + return True + except self.lib.errors.DuplicateKeyError: + return False diff --git a/venv/lib/python3.10/site-packages/limits/storage/redis.py b/venv/lib/python3.10/site-packages/limits/storage/redis.py new file mode 100644 index 0000000..860e497 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/storage/redis.py @@ -0,0 +1,238 @@ +from __future__ import annotations + +import time +from typing import TYPE_CHECKING + +from packaging.version import Version + +from limits.typing import Optional, RedisClient, ScriptP, Tuple, Union + +from ..util import get_package_data +from .base import MovingWindowSupport, Storage + +if TYPE_CHECKING: + import redis + + +class RedisInteractor: + RES_DIR = "resources/redis/lua_scripts" + + SCRIPT_MOVING_WINDOW = get_package_data(f"{RES_DIR}/moving_window.lua") + SCRIPT_ACQUIRE_MOVING_WINDOW = get_package_data( + f"{RES_DIR}/acquire_moving_window.lua" + ) + SCRIPT_CLEAR_KEYS = get_package_data(f"{RES_DIR}/clear_keys.lua") + SCRIPT_INCR_EXPIRE = get_package_data(f"{RES_DIR}/incr_expire.lua") + + lua_moving_window: ScriptP[Tuple[int, int]] + lua_acquire_window: ScriptP[bool] + + def get_moving_window(self, key: str, limit: int, expiry: int) -> Tuple[int, int]: + """ + returns the starting point and the number of entries in the moving + window + + :param key: rate limit key + :param expiry: expiry of entry + :return: (start of window, number of acquired entries) + """ + timestamp = time.time() + window = self.lua_moving_window([key], [int(timestamp - expiry), limit]) + + return window or (int(timestamp), 0) + + def _incr( + self, + key: str, + expiry: int, + connection: RedisClient, + elastic_expiry: bool = False, + amount: int = 1, + ) -> int: + """ + increments the counter for a given rate limit key + + :param connection: Redis connection + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param amount: the number to increment by + """ + value = connection.incrby(key, amount) + + if elastic_expiry or value == amount: + connection.expire(key, expiry) + + return value + + def _get(self, key: str, connection: RedisClient) -> int: + """ + :param connection: Redis connection + :param key: the key to get the counter value for + """ + + return int(connection.get(key) or 0) + + def _clear(self, key: str, connection: RedisClient) -> None: + """ + :param key: the key to clear rate limits for + :param connection: Redis connection + """ + connection.delete(key) + + def _acquire_entry( + self, + key: str, + limit: int, + 
expiry: int, + connection: RedisClient, + amount: int = 1, + ) -> bool: + """ + :param key: rate limit key to acquire an entry in + :param limit: amount of entries allowed + :param expiry: expiry of the entry + :param connection: Redis connection + :param amount: the number of entries to acquire + """ + timestamp = time.time() + acquired = self.lua_acquire_window([key], [timestamp, limit, expiry, amount]) + + return bool(acquired) + + def _get_expiry(self, key: str, connection: RedisClient) -> int: + """ + :param key: the key to get the expiry for + :param connection: Redis connection + """ + + return int(max(connection.ttl(key), 0) + time.time()) + + def _check(self, connection: RedisClient) -> bool: + """ + :param connection: Redis connection + check if storage is healthy + """ + try: + return connection.ping() + except: # noqa + return False + + +class RedisStorage(RedisInteractor, Storage, MovingWindowSupport): + """ + Rate limit storage with redis as backend. + + Depends on :pypi:`redis`. + """ + + STORAGE_SCHEME = ["redis", "rediss", "redis+unix"] + """The storage scheme for redis""" + + DEPENDENCIES = {"redis": Version("3.0")} + + def __init__( + self, + uri: str, + connection_pool: Optional[redis.connection.ConnectionPool] = None, + **options: Union[float, str, bool], + ) -> None: + """ + :param uri: uri of the form ``redis://[:password]@host:port``, + ``redis://[:password]@host:port/db``, + ``rediss://[:password]@host:port``, ``redis+unix:///path/to/sock`` etc. + This uri is passed directly to :func:`redis.from_url` except for the + case of ``redis+unix://`` where it is replaced with ``unix://``. + :param connection_pool: if provided, the redis client is initialized with + the connection pool and any other params passed as :paramref:`options` + :param options: all remaining keyword arguments are passed + directly to the constructor of :class:`redis.Redis` + :raise ConfigurationError: when the :pypi:`redis` library is not available + """ + super().__init__(uri, **options) + redis = self.dependencies["redis"].module + + uri = uri.replace("redis+unix", "unix") + + if not connection_pool: + self.storage = redis.from_url(uri, **options) + else: + self.storage = redis.Redis(connection_pool=connection_pool, **options) + self.initialize_storage(uri) + + def initialize_storage(self, _uri: str) -> None: + self.lua_moving_window = self.storage.register_script(self.SCRIPT_MOVING_WINDOW) + self.lua_acquire_window = self.storage.register_script( + self.SCRIPT_ACQUIRE_MOVING_WINDOW + ) + self.lua_clear_keys = self.storage.register_script(self.SCRIPT_CLEAR_KEYS) + self.lua_incr_expire = self.storage.register_script( + RedisStorage.SCRIPT_INCR_EXPIRE + ) + + def incr( + self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1 + ) -> int: + """ + increments the counter for a given rate limit key + + :param key: the key to increment + :param expiry: amount in seconds for the key to expire in + :param amount: the number to increment by + """ + + if elastic_expiry: + return super()._incr(key, expiry, self.storage, elastic_expiry, amount) + else: + return int(self.lua_incr_expire([key], [expiry, amount])) + + def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + + return super()._get(key, self.storage) + + def clear(self, key: str) -> None: + """ + :param key: the key to clear rate limits for + """ + + return super()._clear(key, self.storage) + + def acquire_entry(self, key: str, limit: int, expiry: int, amount: int = 1) -> bool: + """ + 
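+        Acquire an entry in the moving window, delegating to the Lua script
+        registered in :meth:`initialize_storage`.
+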
:param key: rate limit key to acquire an entry in
+        :param limit: amount of entries allowed
+        :param expiry: expiry of the entry
+        :param amount: the number of entries to acquire
+        """
+
+        return super()._acquire_entry(key, limit, expiry, self.storage, amount)
+
+    def get_expiry(self, key: str) -> int:
+        """
+        :param key: the key to get the expiry for
+        """
+
+        return super()._get_expiry(key, self.storage)
+
+    def check(self) -> bool:
+        """
+        check if storage is healthy
+        """
+
+        return super()._check(self.storage)
+
+    def reset(self) -> Optional[int]:
+        """
+        This function calls a Lua script to delete keys prefixed with 'LIMITER'
+        in blocks of 5000.
+
+        .. warning:: This operation was designed to be fast, but was not tested
+           on a large production-scale system. Be careful with its usage as it
+           could be slow on very large data sets.
+
+        """
+
+        return int(self.lua_clear_keys(["LIMITER*"]))
diff --git a/venv/lib/python3.10/site-packages/limits/storage/redis_cluster.py b/venv/lib/python3.10/site-packages/limits/storage/redis_cluster.py
new file mode 100644
index 0000000..d402607
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/limits/storage/redis_cluster.py
@@ -0,0 +1,128 @@
+import urllib
+import warnings
+from typing import cast
+
+from deprecated.sphinx import versionchanged
+from packaging.version import Version
+
+from limits.errors import ConfigurationError
+from limits.storage.redis import RedisStorage
+from limits.typing import Dict, List, Optional, Tuple, Union
+
+
+@versionchanged(
+    version="2.5.0",
+    reason="""
+Cluster support was provided by the :pypi:`redis-py-cluster` library,
+which has been absorbed into the official :pypi:`redis` client. By
+default the :class:`redis.cluster.RedisCluster` client will be used;
+however, if the version of the package is lower than ``4.2.0`` the
+implementation will fall back to trying :class:`rediscluster.RedisCluster`.
+""",
+)
+class RedisClusterStorage(RedisStorage):
+    """
+    Rate limit storage with redis cluster as backend.
+
+    Depends on :pypi:`redis`.
+    """
+
+    STORAGE_SCHEME = ["redis+cluster"]
+    """The storage scheme for redis cluster"""
+
+    DEFAULT_OPTIONS: Dict[str, Union[float, str, bool]] = {
+        "max_connections": 1000,
+    }
+    "Default options passed to the :class:`~redis.cluster.RedisCluster`"
+
+    DEPENDENCIES = {
+        "redis": Version("4.2.0"),
+        "rediscluster": Version("2.0.0"),  # Deprecated since 2.6.0
+    }
+
+    def __init__(self, uri: str, **options: Union[float, str, bool]) -> None:
+        """
+        :param uri: uri of the form
+         ``redis+cluster://[:password]@host:port,host:port``
+        :param options: all remaining keyword arguments are passed
+         directly to the constructor of :class:`redis.cluster.RedisCluster`
+        :raise ConfigurationError: when the :pypi:`redis` library is not
+         available or if the redis cluster cannot be reached.
+ """ + parsed = urllib.parse.urlparse(uri) + cluster_hosts = [] + for loc in parsed.netloc.split(","): + host, port = loc.split(":") + cluster_hosts.append((host, int(port))) + + self.storage = None + self.using_redis_py = False + self.__pick_storage(cluster_hosts, **{**self.DEFAULT_OPTIONS, **options}) + assert self.storage + self.initialize_storage(uri) + super(RedisStorage, self).__init__(uri, **options) + + def __pick_storage( + self, cluster_hosts: List[Tuple[str, int]], **options: Union[float, str, bool] + ) -> None: + try: + redis_py = self.dependencies["redis"].module + startup_nodes = [redis_py.cluster.ClusterNode(*c) for c in cluster_hosts] + self.storage = redis_py.cluster.RedisCluster( + startup_nodes=startup_nodes, **options + ) + self.using_redis_py = True + return + except ConfigurationError: # pragma: no cover + self.__use_legacy_cluster_implementation(cluster_hosts, **options) + if not self.storage: + raise ConfigurationError( + ( + "Unable to find an implementation for redis cluster" + " Cluster support requires either redis-py>=4.2 or" + " redis-py-cluster" + ) + ) + + def __use_legacy_cluster_implementation( + self, cluster_hosts: List[Tuple[str, int]], **options: Union[float, str, bool] + ) -> None: # pragma: no cover + redis_cluster = self.dependencies["rediscluster"].module + warnings.warn( + ( + "Using redis-py-cluster is deprecated as the library has been" + " absorbed by redis-py (>=4.2). The support will be eventually " + " removed from the limits library and is no longer tested " + " against since version: 2.6. To get rid of this warning, " + " uninstall redis-py-cluster and ensure redis-py>=4.2.0 is installed" + ) + ) + self.storage = redis_cluster.RedisCluster( + startup_nodes=[{"host": c[0], "port": c[1]} for c in cluster_hosts], + **options + ) + + def reset(self) -> Optional[int]: + """ + Redis Clusters are sharded and deleting across shards + can't be done atomically. Because of this, this reset loops over all + keys that are prefixed with 'LIMITER' and calls delete on them, one at + a time. + + .. warning:: + This operation was not tested with extremely large data sets. 
+ On a large production based system, care should be taken with its + usage as it could be slow on very large data sets""" + + if self.using_redis_py: + count = 0 + for primary in self.storage.get_primaries(): + node = self.storage.get_redis_connection(primary) + keys = node.keys("LIMITER*") + count += sum([node.delete(k.decode("utf-8")) for k in keys]) + return count + else: # pragma: no cover + keys = self.storage.keys("LIMITER*") + return cast( + int, sum([self.storage.delete(k.decode("utf-8")) for k in keys]) + ) diff --git a/venv/lib/python3.10/site-packages/limits/storage/redis_sentinel.py b/venv/lib/python3.10/site-packages/limits/storage/redis_sentinel.py new file mode 100644 index 0000000..6b0dfef --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/storage/redis_sentinel.py @@ -0,0 +1,113 @@ +import urllib.parse +from typing import TYPE_CHECKING + +from packaging.version import Version + +from limits.errors import ConfigurationError +from limits.storage.redis import RedisStorage +from limits.typing import Dict, Optional, Union + +if TYPE_CHECKING: + import redis.sentinel + + +class RedisSentinelStorage(RedisStorage): + """ + Rate limit storage with redis sentinel as backend + + Depends on :pypi:`redis` package + """ + + STORAGE_SCHEME = ["redis+sentinel"] + """The storage scheme for redis accessed via a redis sentinel installation""" + + DEFAULT_OPTIONS: Dict[str, Union[float, str, bool]] = { + "socket_timeout": 0.2, + } + "Default options passed to :class:`~redis.sentinel.Sentinel`" + + DEPENDENCIES = {"redis.sentinel": Version("3.0")} + + def __init__( + self, + uri: str, + service_name: Optional[str] = None, + use_replicas: bool = True, + sentinel_kwargs: Optional[Dict[str, Union[float, str, bool]]] = None, + **options: Union[float, str, bool] + ) -> None: + """ + :param uri: url of the form + ``redis+sentinel://host:port,host:port/service_name`` + :param service_name: sentinel service name + (if not provided in :attr:`uri`) + :param use_replicas: Whether to use replicas for read only operations + :param sentinel_kwargs: kwargs to pass as + :attr:`sentinel_kwargs` to :class:`redis.sentinel.Sentinel` + :param options: all remaining keyword arguments are passed + directly to the constructor of :class:`redis.sentinel.Sentinel` + :raise ConfigurationError: when the redis library is not available + or if the redis master host cannot be pinged. 
+ """ + + super(RedisStorage, self).__init__(uri, **options) + + parsed = urllib.parse.urlparse(uri) + sentinel_configuration = [] + sentinel_options = sentinel_kwargs.copy() if sentinel_kwargs else {} + + parsed_auth: Dict[str, Union[float, str, bool]] = {} + + if parsed.username: + parsed_auth["username"] = parsed.username + if parsed.password: + parsed_auth["password"] = parsed.password + + sep = parsed.netloc.find("@") + 1 + + for loc in parsed.netloc[sep:].split(","): + host, port = loc.split(":") + sentinel_configuration.append((host, int(port))) + self.service_name = ( + parsed.path.replace("/", "") if parsed.path else service_name + ) + + if self.service_name is None: + raise ConfigurationError("'service_name' not provided") + + sentinel_dep = self.dependencies["redis.sentinel"].module + self.sentinel: "redis.sentinel.Sentinel" = sentinel_dep.Sentinel( + sentinel_configuration, + sentinel_kwargs={**parsed_auth, **sentinel_options}, + **{**self.DEFAULT_OPTIONS, **parsed_auth, **options} + ) + self.storage = self.sentinel.master_for(self.service_name) + self.storage_slave = self.sentinel.slave_for(self.service_name) + self.use_replicas = use_replicas + self.initialize_storage(uri) + + def get(self, key: str) -> int: + """ + :param key: the key to get the counter value for + """ + + return super()._get( + key, self.storage_slave if self.use_replicas else self.storage + ) + + def get_expiry(self, key: str) -> int: + """ + :param key: the key to get the expiry for + """ + + return super()._get_expiry( + key, self.storage_slave if self.use_replicas else self.storage + ) + + def check(self) -> bool: + """ + Check if storage is healthy by calling :class:`aredis.StrictRedis.ping` + on the slave. + """ + + return super()._check(self.storage_slave if self.use_replicas else self.storage) diff --git a/venv/lib/python3.10/site-packages/limits/storage/registry.py b/venv/lib/python3.10/site-packages/limits/storage/registry.py new file mode 100644 index 0000000..c9a4049 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/storage/registry.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from abc import ABCMeta + +from limits.typing import Dict, List, Tuple, Union + +SCHEMES: Dict[str, StorageRegistry] = {} + + +class StorageRegistry(ABCMeta): + def __new__( + mcs, name: str, bases: Tuple[type, ...], dct: Dict[str, Union[str, List[str]]] + ) -> StorageRegistry: + storage_scheme = dct.get("STORAGE_SCHEME", None) + cls = super().__new__(mcs, name, bases, dct) + + if storage_scheme: + if isinstance(storage_scheme, str): # noqa + schemes = [storage_scheme] + else: + schemes = storage_scheme + + for scheme in schemes: + SCHEMES[scheme] = cls + + return cls diff --git a/venv/lib/python3.10/site-packages/limits/strategies.py b/venv/lib/python3.10/site-packages/limits/strategies.py new file mode 100644 index 0000000..c68cd18 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/strategies.py @@ -0,0 +1,215 @@ +""" +Rate limiting strategies +""" + +import weakref +from abc import ABCMeta, abstractmethod +from typing import Dict, Tuple, Type, Union, cast + +from .limits import RateLimitItem +from .storage import MovingWindowSupport, Storage, StorageTypes + + +class RateLimiter(metaclass=ABCMeta): + def __init__(self, storage: StorageTypes): + assert isinstance(storage, Storage) + self.storage: Storage = weakref.proxy(storage) + + @abstractmethod + def hit(self, item: RateLimitItem, *identifiers: str, cost: int = 1) -> bool: + """ + Consume the rate limit + + :param item: The rate 
limit item
+        :param identifiers: variable list of strings to uniquely identify this
+          instance of the limit
+        :param cost: The cost of this hit, default 1
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def test(self, item: RateLimitItem, *identifiers: str) -> bool:
+        """
+        Check the rate limit without consuming from it.
+
+        :param item: The rate limit item
+        :param identifiers: variable list of strings to uniquely identify this
+          instance of the limit
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def get_window_stats(
+        self, item: RateLimitItem, *identifiers: str
+    ) -> Tuple[int, int]:
+        """
+        Query the reset time and remaining amount for the limit
+
+        :param item: The rate limit item
+        :param identifiers: variable list of strings to uniquely identify this
+          instance of the limit
+        :return: (reset time, remaining)
+        """
+        raise NotImplementedError
+
+    def clear(self, item: RateLimitItem, *identifiers: str) -> None:
+        return self.storage.clear(item.key_for(*identifiers))
+
+
+class MovingWindowRateLimiter(RateLimiter):
+    """
+    Reference: :ref:`strategies:moving window`
+    """
+
+    def __init__(self, storage: StorageTypes):
+        if not (
+            hasattr(storage, "acquire_entry") or hasattr(storage, "get_moving_window")
+        ):
+            raise NotImplementedError(
+                "MovingWindowRateLimiting is not implemented for storage "
+                "of type %s" % storage.__class__
+            )
+        super().__init__(storage)
+
+    def hit(self, item: RateLimitItem, *identifiers: str, cost: int = 1) -> bool:
+        """
+        Consume the rate limit
+
+        :param item: The rate limit item
+        :param identifiers: variable list of strings to uniquely identify this
+          instance of the limit
+        :param cost: The cost of this hit, default 1
+        :return: ``True`` if the entry was acquired, ``False`` otherwise
+        """
+
+        return cast(MovingWindowSupport, self.storage).acquire_entry(
+            item.key_for(*identifiers), item.amount, item.get_expiry(), amount=cost
+        )
+
+    def test(self, item: RateLimitItem, *identifiers: str) -> bool:
+        """
+        Check if the rate limit can be consumed
+
+        :param item: The rate limit item
+        :param identifiers: variable list of strings to uniquely identify this
+          instance of the limit
+        """
+
+        return (
+            cast(MovingWindowSupport, self.storage).get_moving_window(
+                item.key_for(*identifiers),
+                item.amount,
+                item.get_expiry(),
+            )[1]
+            < item.amount
+        )
+
+    def get_window_stats(
+        self, item: RateLimitItem, *identifiers: str
+    ) -> Tuple[int, int]:
+        """
+        Query the reset time and remaining amount for the limit.
+ + :param item: The rate limit item + :param identifiers: variable list of strings to uniquely identify this + instance of the limit + :return: tuple (reset time, remaining) + """ + window_start, window_items = cast( + MovingWindowSupport, self.storage + ).get_moving_window(item.key_for(*identifiers), item.amount, item.get_expiry()) + reset = window_start + item.get_expiry() + + return (reset, item.amount - window_items) + + +class FixedWindowRateLimiter(RateLimiter): + """ + Reference: :ref:`strategies:fixed window` + """ + + def hit(self, item: RateLimitItem, *identifiers: str, cost: int = 1) -> bool: + """ + Consume the rate limit + + :param item: The rate limit item + :param identifiers: variable list of strings to uniquely identify this + instance of the limit + :param cost: The cost of this hit, default 1 + """ + + return ( + self.storage.incr( + item.key_for(*identifiers), + item.get_expiry(), + elastic_expiry=False, + amount=cost, + ) + <= item.amount + ) + + def test(self, item: RateLimitItem, *identifiers: str) -> bool: + """ + Check if the rate limit can be consumed + + :param item: The rate limit item + :param identifiers: variable list of strings to uniquely identify this + instance of the limit + """ + + return self.storage.get(item.key_for(*identifiers)) < item.amount + + def get_window_stats( + self, item: RateLimitItem, *identifiers: str + ) -> Tuple[int, int]: + """ + Query the reset time and remaining amount for the limit + + :param item: The rate limit item + :param identifiers: variable list of strings to uniquely identify this + instance of the limit + :return: (reset time, remaining) + """ + remaining = max(0, item.amount - self.storage.get(item.key_for(*identifiers))) + reset = self.storage.get_expiry(item.key_for(*identifiers)) + + return (reset, remaining) + + +class FixedWindowElasticExpiryRateLimiter(FixedWindowRateLimiter): + """ + Reference: :ref:`strategies:fixed window with elastic expiry` + """ + + def hit(self, item: RateLimitItem, *identifiers: str, cost: int = 1) -> bool: + """ + Consume the rate limit + + :param item: The rate limit item + :param identifiers: variable list of strings to uniquely identify this + instance of the limit + :param cost: The cost of this hit, default 1 + """ + + return ( + self.storage.incr( + item.key_for(*identifiers), + item.get_expiry(), + elastic_expiry=True, + amount=cost, + ) + <= item.amount + ) + + +KnownStrategy = Union[ + Type[FixedWindowRateLimiter], + Type[FixedWindowElasticExpiryRateLimiter], + Type[MovingWindowRateLimiter], +] + +STRATEGIES: Dict[str, KnownStrategy] = { + "fixed-window": FixedWindowRateLimiter, + "fixed-window-elastic-expiry": FixedWindowElasticExpiryRateLimiter, + "moving-window": MovingWindowRateLimiter, +} diff --git a/venv/lib/python3.10/site-packages/limits/typing.py b/venv/lib/python3.10/site-packages/limits/typing.py new file mode 100644 index 0000000..22f15e2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/typing.py @@ -0,0 +1,144 @@ +from typing import ( + TYPE_CHECKING, + Callable, + Dict, + List, + Optional, + Tuple, + Type, + TypeVar, + Union, +) + +from typing_extensions import ClassVar, Counter, ParamSpec, Protocol + +Serializable = Union[int, str, float] + +R = TypeVar("R") +R_co = TypeVar("R_co", covariant=True) +P = ParamSpec("P") + + +if TYPE_CHECKING: + import coredis + import coredis.commands.script + import redis + + +class ItemP(Protocol): + value: bytes + flags: Optional[int] + cas: Optional[int] + + +class EmcacheClientP(Protocol): + async def add( + self, + 
key: bytes, + value: bytes, + *, + flags: int = 0, + exptime: int = 0, + noreply: bool = False, + ) -> None: + ... + + async def get(self, key: bytes, return_flags: bool = False) -> Optional[ItemP]: + ... + + async def gets(self, key: bytes, return_flags: bool = False) -> Optional[ItemP]: + ... + + async def increment( + self, key: bytes, value: int, *, noreply: bool = False + ) -> Optional[int]: + ... + + async def delete(self, key: bytes, *, noreply: bool = False) -> None: + ... + + async def set( + self, + key: bytes, + value: bytes, + *, + flags: int = 0, + exptime: int = 0, + noreply: bool = False, + ) -> None: + ... + + async def touch(self, key: bytes, exptime: int, *, noreply: bool = False) -> None: + ... + + +class MemcachedClientP(Protocol): + def add( + self, + key: str, + value: Serializable, + expire: Optional[int] = 0, + noreply: Optional[bool] = None, + flags: Optional[int] = None, + ) -> bool: + ... + + def get(self, key: str, default: Optional[str] = None) -> bytes: + ... + + def incr(self, key: str, value: int, noreply: Optional[bool] = False) -> int: + ... + + def delete(self, key: str, noreply: Optional[bool] = None) -> Optional[bool]: + ... + + def set( + self, + key: str, + value: Serializable, + expire: int = 0, + noreply: Optional[bool] = None, + flags: Optional[int] = None, + ) -> bool: + ... + + def touch( + self, key: str, expire: Optional[int] = 0, noreply: Optional[bool] = None + ) -> bool: + ... + + +AsyncRedisClient = Union["coredis.Redis[bytes]", "coredis.RedisCluster[bytes]"] +RedisClient = Union["redis.Redis", "redis.cluster.RedisCluster"] + + +class ScriptP(Protocol[R_co]): + def __call__(self, keys: List[Serializable], args: List[Serializable]) -> R_co: + ... + + +__all__ = [ + "AsyncRedisClient", + "Callable", + "ClassVar", + "Counter", + "Dict", + "EmcacheClientP", + "ItemP", + "List", + "MemcachedClientP", + "Optional", + "P", + "ParamSpec", + "Protocol", + "ScriptP", + "Serializable", + "TypeVar", + "R", + "R_co", + "RedisClient", + "Tuple", + "Type", + "TypeVar", + "Union", +] diff --git a/venv/lib/python3.10/site-packages/limits/util.py b/venv/lib/python3.10/site-packages/limits/util.py new file mode 100644 index 0000000..c25c1b0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/util.py @@ -0,0 +1,182 @@ +""" + +""" +import dataclasses +import re +import sys +from collections import UserDict +from types import ModuleType +from typing import TYPE_CHECKING + +import pkg_resources +from packaging.version import Version + +from limits.typing import Dict, List, Optional, Tuple, Type, Union + +from .errors import ConfigurationError +from .limits import GRANULARITIES, RateLimitItem + +SEPARATORS = re.compile(r"[,;|]{1}") +SINGLE_EXPR = re.compile( + r""" + \s*([0-9]+) + \s*(/|\s*per\s*) + \s*([0-9]+) + *\s*(hour|minute|second|day|month|year)s?\s*""", + re.IGNORECASE | re.VERBOSE, +) +EXPR = re.compile( + r"^{SINGLE}(:?{SEPARATORS}{SINGLE})*$".format( + SINGLE=SINGLE_EXPR.pattern, SEPARATORS=SEPARATORS.pattern + ), + re.IGNORECASE | re.VERBOSE, +) + + +@dataclasses.dataclass +class Dependency: + name: str + version_required: Optional[Version] + version_found: Optional[Version] + module: ModuleType + + +if TYPE_CHECKING: + _UserDict = UserDict[str, Dependency] +else: + _UserDict = UserDict + + +class DependencyDict(_UserDict): + Missing = Dependency("Missing", None, None, ModuleType("Missing")) + + def __getitem__(self, key: str) -> Dependency: + dependency = super().__getitem__(key) + if dependency == DependencyDict.Missing: + raise 
ConfigurationError(f"{key} prerequisite not available") + elif dependency.version_required and ( + not dependency.version_found + or dependency.version_found < dependency.version_required + ): + raise ConfigurationError( + f"The minimum version of {dependency.version_required}" + f" of {dependency.name} could not be found" + ) + return dependency + + +class LazyDependency: + """ + Simple utility that provides an :attr:`dependency` + to the child class to fetch any dependencies + without having to import them explicitly. + """ + + DEPENDENCIES: Union[Dict[str, Optional[Version]], List[str]] = [] + """ + The python modules this class has a dependency on. + Used to lazily populate the :attr:`dependencies` + """ + + def __init__(self) -> None: + self._dependencies: DependencyDict = DependencyDict() + + @property + def dependencies(self) -> DependencyDict: + """ + Cached mapping of the modules this storage depends on. + This is done so that the module is only imported lazily + when the storage is instantiated. + + :meta private: + """ + if not getattr(self, "_dependencies", None): + dependencies = DependencyDict() + mapping: Dict[str, Optional[Version]] + + if isinstance(self.DEPENDENCIES, list): + mapping = {dependency: None for dependency in self.DEPENDENCIES} + else: + mapping = self.DEPENDENCIES + + for name, minimum_version in mapping.items(): + dependency, version = get_dependency(name) + if not dependency: + dependencies[name] = DependencyDict.Missing + else: + dependencies[name] = Dependency( + name, minimum_version, version, dependency + ) + self._dependencies = dependencies + return self._dependencies + + +def get_dependency(module_path: str) -> Tuple[Optional[ModuleType], Optional[Version]]: + """ + safe function to import a module at runtime + """ + try: + if module_path not in sys.modules: + __import__(module_path) + root = module_path.split(".")[0] + version = getattr(sys.modules[root], "__version__", "0.0.0") + return sys.modules[module_path], Version(version) + except ImportError: # pragma: no cover + return None, None + + +def get_package_data(path: str) -> bytes: + return pkg_resources.resource_string(__name__, path) + + +def parse_many(limit_string: str) -> List[RateLimitItem]: + """ + parses rate limits in string notation containing multiple rate limits + (e.g. ``1/second; 5/minute``) + + :param limit_string: rate limit string using :ref:`ratelimit-string` + :raise ValueError: if the string notation is invalid. + + """ + + if not (isinstance(limit_string, str) and EXPR.match(limit_string)): + raise ValueError("couldn't parse rate limit string '%s'" % limit_string) + limits = [] + + for limit in SEPARATORS.split(limit_string): + match = SINGLE_EXPR.match(limit) + + if match: + amount, _, multiples, granularity_string = match.groups() + granularity = granularity_from_string(granularity_string) + limits.append( + granularity(int(amount), multiples and int(multiples) or None) + ) + + return limits + + +def parse(limit_string: str) -> RateLimitItem: + """ + parses a single rate limit in string notation + (e.g. ``1/second`` or ``1 per second``) + + :param limit_string: rate limit string using :ref:`ratelimit-string` + :raise ValueError: if the string notation is invalid. 
+ + """ + + return list(parse_many(limit_string))[0] + + +def granularity_from_string(granularity_string: str) -> Type[RateLimitItem]: + """ + + :param granularity_string: + :raise ValueError: + """ + + for granularity in GRANULARITIES.values(): + if granularity.check_granularity_string(granularity_string): + return granularity + raise ValueError("no granularity matched for %s" % granularity_string) diff --git a/venv/lib/python3.10/site-packages/limits/version.py b/venv/lib/python3.10/site-packages/limits/version.py new file mode 100644 index 0000000..1123344 --- /dev/null +++ b/venv/lib/python3.10/site-packages/limits/version.py @@ -0,0 +1,3 @@ +""" +empty file to be updated by versioneer +""" diff --git a/venv/lib/python3.10/site-packages/loguru-0.6.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/loguru-0.6.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.10/site-packages/loguru-0.6.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/loguru-0.6.0.dist-info/LICENSE b/venv/lib/python3.10/site-packages/loguru-0.6.0.dist-info/LICENSE new file mode 100644 index 0000000..8864d4a --- /dev/null +++ b/venv/lib/python3.10/site-packages/loguru-0.6.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
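For reference, the rate limit string notation handled by ``parse`` and
``parse_many`` above plugs directly into the storages and strategies defined
earlier in this diff. A minimal sketch, assuming only the vendored ``limits``
package and a hypothetical ``"client-42"`` identifier::

    from limits import parse_many
    from limits.storage import MemoryStorage
    from limits.strategies import FixedWindowRateLimiter

    # "10 per minute; 100/hour" yields one RateLimitItem per clause
    items = parse_many("10 per minute; 100/hour")

    # Keep a reference to the storage: the strategy only holds a weakref proxy
    storage = MemoryStorage()
    limiter = FixedWindowRateLimiter(storage)

    for item in items:
        # hit() consumes one unit and returns False once the limit is exhausted
        allowed = limiter.hit(item, "client-42")

Note that ``RateLimiter`` wraps its storage in :func:`weakref.proxy`, so the
storage object must stay alive in the caller for the limiter to keep working.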
diff --git a/venv/lib/python3.10/site-packages/loguru-0.6.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/loguru-0.6.0.dist-info/METADATA new file mode 100644 index 0000000..889822e --- /dev/null +++ b/venv/lib/python3.10/site-packages/loguru-0.6.0.dist-info/METADATA @@ -0,0 +1,558 @@ +Metadata-Version: 2.1 +Name: loguru +Version: 0.6.0 +Summary: Python logging made (stupidly) simple +Home-page: https://github.com/Delgan/loguru +Author: Delgan +Author-email: delgan.py@gmail.com +License: MIT license +Download-URL: https://github.com/Delgan/loguru/archive/0.6.0.tar.gz +Project-URL: Changelog, https://github.com/Delgan/loguru/blob/master/CHANGELOG.rst +Project-URL: Documentation, https://loguru.readthedocs.io/en/stable/index.html +Keywords: loguru,logging,logger,log +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Topic :: System :: Logging +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: Implementation :: CPython +Requires-Python: >=3.5 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: aiocontextvars (>=0.2.0) ; python_version < "3.7" +Requires-Dist: colorama (>=0.3.4) ; sys_platform == "win32" +Requires-Dist: win32-setctime (>=1.0.0) ; sys_platform == "win32" +Provides-Extra: dev +Requires-Dist: colorama (>=0.3.4) ; extra == 'dev' +Requires-Dist: docutils (==0.16) ; extra == 'dev' +Requires-Dist: flake8 (>=3.7.7) ; extra == 'dev' +Requires-Dist: tox (>=3.9.0) ; extra == 'dev' +Requires-Dist: pytest (>=4.6.2) ; extra == 'dev' +Requires-Dist: pytest-cov (>=2.7.1) ; extra == 'dev' +Requires-Dist: black (>=19.10b0) ; (python_version >= "3.6") and extra == 'dev' +Requires-Dist: isort (>=5.1.1) ; (python_version >= "3.6") and extra == 'dev' +Requires-Dist: Sphinx (>=4.1.1) ; (python_version >= "3.6") and extra == 'dev' +Requires-Dist: sphinx-autobuild (>=0.7.1) ; (python_version >= "3.6") and extra == 'dev' +Requires-Dist: sphinx-rtd-theme (>=0.4.3) ; (python_version >= "3.6") and extra == 'dev' + +.. image:: https://raw.githubusercontent.com/Delgan/loguru/master/docs/_static/img/logo.png + :target: https://github.com/Delgan/loguru + :align: center + :alt: Loguru logo + +.. image:: https://raw.githubusercontent.com/Delgan/loguru/master/docs/_static/img/demo.gif + :target: https://github.com/Delgan/loguru + :align: center + :alt: Loguru demo + +========= + +.. image:: https://img.shields.io/pypi/v/loguru.svg + :target: https://pypi.python.org/pypi/loguru + :alt: Pypi version + +.. image:: https://img.shields.io/badge/python-3.5%2B%20%7C%20PyPy-blue.svg + :target: https://github.com/Delgan/loguru/blob/master/.travis.yml + :alt: Python versions + +.. image:: https://img.shields.io/readthedocs/loguru.svg + :target: https://loguru.readthedocs.io/en/stable/index.html + :alt: Docs + +.. 
image:: https://img.shields.io/github/workflow/status/Delgan/loguru/Tests/master + :target: https://github.com/Delgan/loguru/actions/workflows/tests.yml?query=branch:master + :alt: Build status + +.. image:: https://img.shields.io/codecov/c/github/delgan/loguru/master.svg + :target: https://codecov.io/gh/delgan/loguru/branch/master + :alt: Coverage + +.. image:: https://img.shields.io/codacy/grade/be7337df3c0d40d1929eb7f79b1671a6.svg + :target: https://app.codacy.com/gh/Delgan/loguru/dashboard + :alt: Code quality + +.. image:: https://img.shields.io/github/license/delgan/loguru.svg + :target: https://github.com/Delgan/loguru/blob/master/LICENSE + :alt: License + +**Loguru** is a library which aims to bring enjoyable logging in Python. + +Did you ever feel lazy about configuring a logger and used ``print()`` instead?... I did, yet logging is fundamental to every application and eases the process of debugging. Using **Loguru** you have no excuse not to use logging from the start, this is as simple as ``from loguru import logger``. + +Also, this library is intended to make Python logging less painful by adding a bunch of useful functionalities that solve caveats of the standard loggers. Using logs in your application should be an automatism, **Loguru** tries to make it both pleasant and powerful. + + +.. end-of-readme-intro + +Installation +------------ + +:: + + pip install loguru + + +Features +-------- + +* `Ready to use out of the box without boilerplate`_ +* `No Handler, no Formatter, no Filter: one function to rule them all`_ +* `Easier file logging with rotation / retention / compression`_ +* `Modern string formatting using braces style`_ +* `Exceptions catching within threads or main`_ +* `Pretty logging with colors`_ +* `Asynchronous, Thread-safe, Multiprocess-safe`_ +* `Fully descriptive exceptions`_ +* `Structured logging as needed`_ +* `Lazy evaluation of expensive functions`_ +* `Customizable levels`_ +* `Better datetime handling`_ +* `Suitable for scripts and libraries`_ +* `Entirely compatible with standard logging`_ +* `Personalizable defaults through environment variables`_ +* `Convenient parser`_ +* `Exhaustive notifier`_ +* `10x faster than built-in logging`_ + +Take the tour +------------- + +.. |logger| replace:: ``logger`` +.. _logger: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger + +.. |add| replace:: ``add()`` +.. _add: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.add + +.. |remove| replace:: ``remove()`` +.. _remove: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.remove + +.. |complete| replace:: ``complete()`` +.. _complete: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.complete + +.. |catch| replace:: ``catch()`` +.. _catch: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.catch + +.. |bind| replace:: ``bind()`` +.. _bind: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.bind + +.. |contextualize| replace:: ``contextualize()`` +.. _contextualize: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.contextualize + +.. |patch| replace:: ``patch()`` +.. _patch: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.patch + +.. |opt| replace:: ``opt()`` +.. _opt: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.opt + +.. |trace| replace:: ``trace()`` +.. 
_trace: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.trace + +.. |success| replace:: ``success()`` +.. _success: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.success + +.. |level| replace:: ``level()`` +.. _level: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.level + +.. |configure| replace:: ``configure()`` +.. _configure: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.configure + +.. |disable| replace:: ``disable()`` +.. _disable: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.disable + +.. |enable| replace:: ``enable()`` +.. _enable: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.enable + +.. |parse| replace:: ``parse()`` +.. _parse: https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.parse + +.. _sinks: https://loguru.readthedocs.io/en/stable/api/logger.html#sink +.. _record dict: https://loguru.readthedocs.io/en/stable/api/logger.html#record +.. _log messages: https://loguru.readthedocs.io/en/stable/api/logger.html#message +.. _easily configurable: https://loguru.readthedocs.io/en/stable/api/logger.html#file +.. _markup tags: https://loguru.readthedocs.io/en/stable/api/logger.html#color +.. _fixes it: https://loguru.readthedocs.io/en/stable/api/logger.html#time +.. _No problem: https://loguru.readthedocs.io/en/stable/api/logger.html#env +.. _logging levels: https://loguru.readthedocs.io/en/stable/api/logger.html#levels + +.. |better_exceptions| replace:: ``better_exceptions`` +.. _better_exceptions: https://github.com/Qix-/better-exceptions + +.. |notifiers| replace:: ``notifiers`` +.. _notifiers: https://github.com/notifiers/notifiers + +Ready to use out of the box without boilerplate +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The main concept of `Loguru` is that **there is one and only one** |logger|_. + +For convenience, it is pre-configured and outputs to ``stderr`` to begin with (but that's entirely configurable). + +:: + + from loguru import logger + + logger.debug("That's it, beautiful and simple logging!") + +The |logger|_ is just an interface which dispatches log messages to configured handlers. Simple, right? + + +No Handler, no Formatter, no Filter: one function to rule them all +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +How to add a handler? How to set up logs formatting? How to filter messages? How to set level? + +One answer: the |add|_ function. + +:: + + logger.add(sys.stderr, format="{time} {level} {message}", filter="my_module", level="INFO") + +This function should be used to register sinks_ which are responsible for managing `log messages`_ contextualized with a `record dict`_. A sink can take many forms: a simple function, a string path, a file-like object, a coroutine function or a built-in Handler. + +Note that you may also |remove|_ a previously added handler by using the identifier returned while adding it. This is particularly useful if you want to supersede the default ``stderr`` handler: just call ``logger.remove()`` to make a fresh start. + + +Easier file logging with rotation / retention / compression +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If you want to send logged messages to a file, you just have to use a string path as the sink. 
It can be automatically timed too for convenience::
+
+    logger.add("file_{time}.log")
+
+It is also `easily configurable`_ if you need a rotating logger, if you want to remove older logs, or if you wish to compress your files at closure.
+
+::
+
+    logger.add("file_1.log", rotation="500 MB")    # Automatically rotate too big file
+    logger.add("file_2.log", rotation="12:00")     # New file is created each day at noon
+    logger.add("file_3.log", rotation="1 week")    # Once the file is too old, it's rotated
+
+    logger.add("file_X.log", retention="10 days")  # Cleanup after some time
+
+    logger.add("file_Y.log", compression="zip")    # Save some loved space
+
+
+Modern string formatting using braces style
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+`Loguru` favors the much more elegant and powerful ``{}`` formatting over ``%``; logging functions are actually equivalent to ``str.format()``.
+
+::
+
+    logger.info("If you're using Python {}, prefer {feature} of course!", 3.6, feature="f-strings")
+
+
+Exceptions catching within threads or main
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Have you ever seen your program crashing unexpectedly without seeing anything in the log file? Did you ever notice that exceptions occurring in threads were not logged? This can be solved using the |catch|_ decorator / context manager which ensures that any error is correctly propagated to the |logger|_.
+
+::
+
+    @logger.catch
+    def my_function(x, y, z):
+        # An error? It's caught anyway!
+        return 1 / (x + y + z)
+
+
+Pretty logging with colors
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+`Loguru` automatically adds colors to your logs if your terminal is compatible. You can define your favorite style by using `markup tags`_ in the sink format.
+
+::
+
+    logger.add(sys.stdout, colorize=True, format="<green>{time}</green> <level>{message}</level>")
+
+
+Asynchronous, Thread-safe, Multiprocess-safe
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+All sinks added to the |logger|_ are thread-safe by default. They are not multiprocess-safe, but you can ``enqueue`` the messages to ensure logs integrity. This same argument can also be used if you want async logging.
+
+::
+
+    logger.add("somefile.log", enqueue=True)
+
+Coroutine functions used as sinks are also supported and should be awaited with |complete|_.
+
+
+Fully descriptive exceptions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Logging exceptions that occur in your code is important to track bugs, but it's quite useless if you don't know why it failed. `Loguru` helps you identify problems by allowing the entire stack trace to be displayed, including values of variables (thanks |better_exceptions|_ for this!).
+
+The code::
+
+    logger.add("out.log", backtrace=True, diagnose=True)  # Caution, may leak sensitive data in prod
+
+    def func(a, b):
+        return a / b
+
+    def nested(c):
+        try:
+            func(5, c)
+        except ZeroDivisionError:
+            logger.exception("What?!")
+
+    nested(0)
+
+Would result in:
+
+.. code-block::
+
+    2018-07-17 01:38:43.975 | ERROR | __main__:nested:10 - What?!
+    Traceback (most recent call last):
+
+      File "test.py", line 12, in <module>
+        nested(0)
+        └ <function nested at 0x7f5c755322f0>
+
+    > File "test.py", line 8, in nested
+        func(5, c)
+        │    └ 0
+        └ <function func at 0x7f5c79fc2e18>
+
+      File "test.py", line 4, in func
+        return a / b
+        │      └ 0
+        └ 5
+
+    ZeroDivisionError: division by zero
+
+
+Structured logging as needed
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Want your logs to be serialized for easier parsing or to pass them around? Using the ``serialize`` argument, each log message will be converted to a JSON string before being sent to the configured sink.
+
+::
+
+    logger.add(custom_sink_function, serialize=True)
+
+Using |bind|_ you can contextualize your logger messages by modifying the `extra` record attribute.
+
+::
+
+    logger.add("file.log", format="{extra[ip]} {extra[user]} {message}")
+    context_logger = logger.bind(ip="192.168.0.1", user="someone")
+    context_logger.info("Contextualize your logger easily")
+    context_logger.bind(user="someone_else").info("Inline binding of extra attribute")
+    context_logger.info("Use kwargs to add context during formatting: {user}", user="anybody")
+
+It is possible to modify a context-local state temporarily with |contextualize|_:
+
+::
+
+    with logger.contextualize(task=task_id):
+        do_something()
+        logger.info("End of task")
+
+You can also have more fine-grained control over your logs by combining |bind|_ and ``filter``:
+
+::
+
+    logger.add("special.log", filter=lambda record: "special" in record["extra"])
+    logger.debug("This message is not logged to the file")
+    logger.bind(special=True).info("This message, though, is logged to the file!")
+
+Finally, the |patch|_ method allows dynamic values to be attached to the record dict of each new message:
+
+::
+
+    logger.add(sys.stderr, format="{extra[utc]} {message}")
+    logger = logger.patch(lambda record: record["extra"].update(utc=datetime.utcnow()))
+
+
+Lazy evaluation of expensive functions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Sometimes you would like to log verbose information without any performance penalty in production; you can use the |opt|_ method to achieve this.
+
+::
+
+    logger.opt(lazy=True).debug("If sink level <= DEBUG: {x}", x=lambda: expensive_function(2**64))
+
+    # By the way, "opt()" serves many usages
+    logger.opt(exception=True).info("Error stacktrace added to the log message (tuple accepted too)")
+    logger.opt(colors=True).info("Per message <blue>colors</blue>")
+    logger.opt(record=True).info("Display values from the record (eg. {record[thread]})")
+    logger.opt(raw=True).info("Bypass sink formatting\n")
+    logger.opt(depth=1).info("Use parent stack context (useful within wrapped functions)")
+    logger.opt(capture=False).info("Keyword arguments not added to {dest} dict", dest="extra")
+
+
+Customizable levels
+^^^^^^^^^^^^^^^^^^^
+
+`Loguru` comes with all standard `logging levels`_ to which |trace|_ and |success|_ are added. Do you need more? Then, just create it by using the |level|_ function.
+
+::
+
+    new_level = logger.level("SNAKY", no=38, color="<yellow>", icon="🐍")
+
+    logger.log("SNAKY", "Here we go!")
+
+
+Better datetime handling
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+The standard logging is bloated with arguments like ``datefmt`` or ``msecs``, ``%(asctime)s`` and ``%(created)s``, naive datetimes without timezone information, unintuitive formatting, etc. `Loguru` `fixes it`_:
+
+::
+
+    logger.add("file.log", format="{time:YYYY-MM-DD at HH:mm:ss} | {level} | {message}")
+
+
+Suitable for scripts and libraries
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Using the logger in your scripts is easy, and you can |configure|_ it at start. To use `Loguru` from inside a library, remember to never call |add|_ but use |disable|_ instead so logging functions become no-op. If a developer wishes to see your library's logs, they can |enable|_ it again.
+
+::
+
+    # For scripts
+    config = {
+        "handlers": [
+            {"sink": sys.stdout, "format": "{time} - {message}"},
+            {"sink": "file.log", "serialize": True},
+        ],
+        "extra": {"user": "someone"}
+    }
+    logger.configure(**config)
+
+    # For libraries
+    logger.disable("my_library")
+    logger.info("No matter added sinks, this message is not displayed")
+    logger.enable("my_library")
+    logger.info("This message however is propagated to the sinks")
+
+
+Entirely compatible with standard logging
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Wish to use built-in logging ``Handler`` as a `Loguru` sink?
+
+::
+
+    handler = logging.handlers.SysLogHandler(address=('localhost', 514))
+    logger.add(handler)
+
+Need to propagate `Loguru` messages to standard `logging`?
+
+::
+
+    class PropagateHandler(logging.Handler):
+        def emit(self, record):
+            logging.getLogger(record.name).handle(record)
+
+    logger.add(PropagateHandler(), format="{message}")
+
+Want to intercept standard `logging` messages toward your `Loguru` sinks?
+
+::
+
+    class InterceptHandler(logging.Handler):
+        def emit(self, record):
+            # Get corresponding Loguru level if it exists
+            try:
+                level = logger.level(record.levelname).name
+            except ValueError:
+                level = record.levelno
+
+            # Find caller from where originated the logged message
+            frame, depth = logging.currentframe(), 2
+            while frame.f_code.co_filename == logging.__file__:
+                frame = frame.f_back
+                depth += 1
+
+            logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
+
+    logging.basicConfig(handlers=[InterceptHandler()], level=0)
+
+
+Personalizable defaults through environment variables
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Don't like the default logger formatting? Would prefer another ``DEBUG`` color? `No problem`_::
+
+    # Linux / OSX
+    export LOGURU_FORMAT="{time} | {message}"
+
+    # Windows
+    setx LOGURU_DEBUG_COLOR "<green>"
+
+
+Convenient parser
+^^^^^^^^^^^^^^^^^
+
+It is often useful to extract specific information from generated logs; this is why `Loguru` provides a |parse|_ method which helps to deal with logs and regexes.
+
+::
+
+    pattern = r"(?P